[Obs AI Assistant] Specify embedding model during onboarding for the Knowledge Base (#218448)

Closes https://github.com/elastic/obs-ai-assistant-team/issues/230
Closes https://github.com/elastic/obs-ai-assistant-team/issues/232

Related to https://github.com/elastic/kibana/pull/215591

## Summary

This PR implements the first phase of multilingual Knowledge Base support. During
onboarding, users can now pick the `e5-small` model for the Knowledge Base if they
want support for languages other than English.
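
The selected model's inference endpoint ID is passed through to the KB setup API (`POST /internal/observability_ai_assistant/kb/setup` with an `inference_id` query parameter). Below is a minimal sketch of how a consumer of the updated `useKnowledgeBase` hook can trigger installation with a specific model; the hook signature and the preconfigured endpoint IDs come from this PR, while the component itself is purely illustrative:

```tsx
// Illustrative only: a minimal consumer of the updated `install(inferenceId)` API.
// The import path and the component itself are assumptions for the example.
import React from 'react';
import { EuiButton } from '@elastic/eui';
import { useKnowledgeBase } from '../hooks/use_knowledge_base';

export function InstallMultilingualKnowledgeBaseButton() {
  const knowledgeBase = useKnowledgeBase();

  return (
    <EuiButton
      isLoading={knowledgeBase.isInstalling}
      onClick={() => {
        // '.multilingual-e5-small-elasticsearch' installs the multilingual KB;
        // '.elser-2-elasticsearch' remains the English-only default.
        knowledgeBase.install('.multilingual-e5-small-elasticsearch');
      }}
    >
      Install Knowledge Base (E5-small)
    </EuiButton>
  );
}
```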

<img width="610" alt="image"
src="https://github.com/user-attachments/assets/4c815aa4-aa97-4845-98c5-e079dd92f23a"
/>

<img width="1281" alt="image"
src="https://github.com/user-attachments/assets/7c1bcd82-5464-497f-a053-7fe271da1cdd"
/>

<img width="1280" alt="image"
src="https://github.com/user-attachments/assets/bc084e90-c291-44ea-8560-e033729bfcca"
/>

When the KB model is not allocated because the ML nodes have scaled down:


![image](https://github.com/user-attachments/assets/2f52e31e-81e4-4824-bc5b-b97df714da5c)
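
In this state the UI shows a "Re-deploy Model" button, which calls the new warm-up route with the currently configured inference endpoint. A rough sketch of that call, based on the `warmupModel` helper added in this PR (toast and status-refresh wiring omitted):

```ts
// Sketch of the request made by `warmupModel(inferenceId)`; the `service` shape is
// simplified compared to the real AI Assistant app service.
async function redeployKnowledgeBaseModel(
  service: { callApi: (endpoint: string, options: unknown) => Promise<unknown> },
  inferenceId: string
) {
  // Ask the server to warm up (re-deploy) the ML model backing the Knowledge Base.
  await service.callApi('POST /internal/observability_ai_assistant/kb/warmup_model', {
    params: { query: { inference_id: inferenceId } },
    signal: null,
  });
  // Callers then refresh the KB status and keep polling until `kbState` is READY.
}
```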


### Checklist

- [x] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/src/platform/packages/shared/kbn-i18n/README.md)
- [x] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [ ] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
- [x] The PR description includes the appropriate Release Notes section,
and the correct `release_note:*` label is applied per the
[guidelines](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process)


## Upgrade testing steps

### 9.0 - 9.1 (main)

Check out the `9.0` branch and start Kibana and ES. ES must be started with
`path.data` so that the data persists and can be reused after the upgrade:

```
yarn es snapshot --license trial --E path.data=/Users/sorenlouv/elastic/es_data/upgrade_test_9.0
```

---------

Co-authored-by: Søren Louv-Jansen <soren.louv@elastic.co>
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Søren Louv-Jansen <sorenlouv@gmail.com>
Commit dc019f85e1 (parent 10692211bc), authored by Viduni Wickramarachchi on 2025-05-05 04:13:10 -04:00 and committed via GitHub.
169 changed files with 4312 additions and 2106 deletions

View file

@ -256,6 +256,19 @@ export class LockManager {
}
}
export async function getLock({
esClient,
logger,
lockId,
}: {
esClient: ElasticsearchClient;
logger: Logger;
lockId: LockId;
}): Promise<LockDocument | undefined> {
const lockManager = new LockManager(lockId, esClient, logger);
return lockManager.get();
}
export async function withLock<T>(
{
esClient,
@ -280,9 +293,7 @@ export async function withLock<T>(
// extend the ttl periodically
const extendInterval = Math.floor(ttl / 4);
logger.debug(
`Lock "${lockId}" acquired. Extending TTL every ${prettyMilliseconds(extendInterval)}`
);
logger.debug(`Extending TTL for lock "${lockId}" every ${prettyMilliseconds(extendInterval)}`);
let extendTTlPromise = Promise.resolve(true);
const intervalId = setInterval(() => {

View file

@ -8,7 +8,7 @@
*/
import { CoreSetup, Logger } from '@kbn/core/server';
import { LockId, withLock } from './lock_manager_client';
import { LockId, withLock, getLock } from './lock_manager_client';
export class LockManagerService {
constructor(private readonly coreSetup: CoreSetup<any>, private readonly logger: Logger) {}
@ -35,8 +35,16 @@ export class LockManagerService {
) {
const [coreStart] = await this.coreSetup.getStartServices();
const esClient = coreStart.elasticsearch.client.asInternalUser;
const logger = this.logger.get('LockManager');
const logger = this.logger.get('lock-manager');
return withLock<T>({ esClient, logger, lockId, metadata }, callback);
}
async getLock(lockId: LockId) {
const [coreStart] = await this.coreSetup.getStartServices();
const esClient = coreStart.elasticsearch.client.asInternalUser;
const logger = this.logger.get('lock-manager');
return getLock({ esClient, logger, lockId });
}
}
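
(For context: a hypothetical consumer could use the new `getLock` method to check whether a lock is currently held before starting a competing operation. The lock ID below is illustrative, not one defined in this PR.)

```ts
// Illustrative usage of the LockManagerService.getLock method added above.
// The lock ID is a made-up example; real IDs are defined by the callers of withLock.
async function isOperationInProgress(lockManager: LockManagerService): Promise<boolean> {
  const lock = await lockManager.getLock('example_lock_id' as LockId);
  // getLock resolves to the lock document when the lock is held, or undefined otherwise.
  return lock !== undefined;
}
```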

View file

@ -34,6 +34,9 @@ run(async ({ log }) => {
'src/platform/**',
'x-pack/platform/**',
'x-pack/solutions/**',
// ignore autogenerated snapshots
'x-pack/test/api_integration/deployment_agnostic/apis/observability/ai_assistant/knowledge_base/snapshots',
],
});

View file

@ -12,12 +12,11 @@ import type { ActionConnector } from '@kbn/triggers-actions-ui-plugin/public';
import { GenerativeAIForObservabilityConnectorFeatureId } from '@kbn/actions-plugin/common';
import { isSupportedConnectorType } from '@kbn/inference-common';
import { AssistantBeacon } from '@kbn/ai-assistant-icon';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base';
import type { UseGenAIConnectorsResult } from '../hooks/use_genai_connectors';
import { Disclaimer } from './disclaimer';
import { WelcomeMessageConnectors } from './welcome_message_connectors';
import { WelcomeMessageKnowledgeBase } from './welcome_message_knowledge_base';
import { WelcomeMessageKnowledgeBase } from '../knowledge_base/welcome_message_knowledge_base';
import { StarterPrompts } from './starter_prompts';
import { useKibana } from '../hooks/use_kibana';
@ -61,13 +60,6 @@ export function WelcomeMessage({
if (isSupportedConnectorType(createdConnector.actionTypeId)) {
connectors.reloadConnectors();
}
if (
!knowledgeBase.status.value ||
knowledgeBase.status.value?.kbState === KnowledgeBaseState.NOT_INSTALLED
) {
knowledgeBase.install();
}
};
const ConnectorFlyout = useMemo(

View file

@ -22,7 +22,7 @@ import { i18n } from '@kbn/i18n';
import { isHttpFetchError } from '@kbn/core-http-browser';
import type { UseGenAIConnectorsResult } from '../hooks/use_genai_connectors';
const fadeInAnimation = keyframes`
export const fadeInAnimation = keyframes`
from {
opacity: 0;
}

View file

@ -1,184 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useEffect, useState } from 'react';
import { i18n } from '@kbn/i18n';
import {
EuiButton,
EuiButtonEmpty,
EuiFlexGroup,
EuiFlexItem,
EuiIcon,
EuiPopover,
EuiSpacer,
EuiText,
} from '@elastic/eui';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import usePrevious from 'react-use/lib/usePrevious';
import { WelcomeMessageKnowledgeBaseSetupErrorPanel } from './welcome_message_knowledge_base_setup_error_panel';
import { UseKnowledgeBaseResult } from '../hooks';
const SettingUpKnowledgeBase = () => (
<>
<EuiText color="subdued" size="s">
{i18n.translate('xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel', {
defaultMessage:
'We are setting up your knowledge base. This may take a few minutes. You can continue to use the Assistant while this process is underway.',
})}
</EuiText>
<EuiSpacer size="m" />
<EuiButtonEmpty
data-test-subj="observabilityAiAssistantWelcomeMessageSettingUpKnowledgeBaseButton"
isLoading
onClick={() => {}}
>
{i18n.translate('xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel', {
defaultMessage: 'Setting up Knowledge base',
})}
</EuiButtonEmpty>
</>
);
const InspectKnowledgeBasePopover = ({
knowledgeBase,
}: {
knowledgeBase: UseKnowledgeBaseResult;
}) => {
// track whether the "inspect issues" popover is open
const [isPopoverOpen, setIsPopoverOpen] = useState(false);
const handleInstall = async () => {
setIsPopoverOpen(false);
await knowledgeBase.install();
};
return knowledgeBase.status.value?.modelStats ? (
<EuiFlexItem grow={false}>
<EuiPopover
button={
<EuiButtonEmpty
data-test-subj="observabilityAiAssistantWelcomeMessageInspectErrorsButton"
iconType="inspect"
onClick={() => setIsPopoverOpen(!isPopoverOpen)}
>
{i18n.translate('xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel', {
defaultMessage: 'Inspect',
})}
</EuiButtonEmpty>
}
isOpen={isPopoverOpen}
panelPaddingSize="none"
closePopover={() => setIsPopoverOpen(false)}
>
<WelcomeMessageKnowledgeBaseSetupErrorPanel
knowledgeBase={knowledgeBase}
onRetryInstall={handleInstall}
/>
</EuiPopover>
</EuiFlexItem>
) : null;
};
export function WelcomeMessageKnowledgeBase({
knowledgeBase,
}: {
knowledgeBase: UseKnowledgeBaseResult;
}) {
const prevIsInstalling = usePrevious(knowledgeBase.isInstalling || knowledgeBase.isPolling);
const [showSuccessBanner, setShowSuccessBanner] = useState(false);
useEffect(() => {
if (prevIsInstalling) {
setShowSuccessBanner(true);
}
}, [knowledgeBase.isInstalling, prevIsInstalling]);
const install = async () => {
await knowledgeBase.install();
};
if (knowledgeBase.isInstalling) return <SettingUpKnowledgeBase />;
switch (knowledgeBase.status.value?.kbState) {
case KnowledgeBaseState.NOT_INSTALLED:
return (
<>
<EuiText color="subdued" size="s">
{i18n.translate(
'xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel',
{ defaultMessage: `Your Knowledge base hasn't been set up.` }
)}
</EuiText>
<EuiSpacer size="m" />
<EuiFlexGroup justifyContent="center">
<EuiFlexItem grow={false}>
<div>
<EuiButton
color="primary"
data-test-subj="observabilityAiAssistantWelcomeMessageSetUpKnowledgeBaseButton"
fill
isLoading={false}
iconType="importAction"
onClick={install}
>
{i18n.translate('xpack.aiAssistant.welcomeMessage.retryButtonLabel', {
defaultMessage: 'Install Knowledge base',
})}
</EuiButton>
</div>
</EuiFlexItem>
</EuiFlexGroup>
<EuiSpacer size="m" />
</>
);
case KnowledgeBaseState.DEPLOYING_MODEL:
case KnowledgeBaseState.PENDING_MODEL_DEPLOYMENT:
return (
<>
<SettingUpKnowledgeBase />
<InspectKnowledgeBasePopover knowledgeBase={knowledgeBase} />
</>
);
case KnowledgeBaseState.READY:
return showSuccessBanner ? (
<div>
<EuiFlexGroup alignItems="center" gutterSize="s" justifyContent="center">
<EuiFlexItem grow={false}>
<EuiIcon type="checkInCircleFilled" color="success" />
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiText color="subdued" size="s">
{i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel',
{ defaultMessage: 'Knowledge base successfully installed' }
)}
</EuiText>
</EuiFlexItem>
</EuiFlexGroup>
</div>
) : null;
case KnowledgeBaseState.ERROR:
return (
<>
<EuiText color="subdued" size="s">
{i18n.translate('xpack.aiAssistant.welcomeMessage.SettingUpFailTextLabel', {
defaultMessage: `Knowledge Base setup failed. Check 'Inspect' for details.`,
})}
</EuiText>
<InspectKnowledgeBasePopover knowledgeBase={knowledgeBase} />
</>
);
default:
return null;
}
}

View file

@ -20,7 +20,12 @@ export function useKnowledgeBase(): UseKnowledgeBaseResult {
value: {
kbState: KnowledgeBaseState.NOT_INSTALLED,
enabled: true,
concreteWriteIndex: undefined,
currentInferenceId: undefined,
isReIndexing: false,
},
},
warmupModel: async () => {},
isWarmingUpModel: false,
};
}

View file

@ -0,0 +1,82 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { renderHook, act } from '@testing-library/react';
import { useInferenceEndpoints } from './use_inference_endpoints';
import { useAIAssistantAppService } from './use_ai_assistant_app_service';
jest.mock('./use_ai_assistant_app_service');
describe('useInferenceEndpoints', () => {
const mockCallApi = jest.fn();
beforeEach(() => {
jest.clearAllMocks();
(useAIAssistantAppService as jest.Mock).mockReturnValue({
callApi: mockCallApi,
});
});
it('fetches inference endpoints successfully on mount', async () => {
const mockResponse = {
endpoints: [
{ id: '1', name: 'Endpoint 1' },
{ id: '2', name: 'Endpoint 2' },
],
};
mockCallApi.mockResolvedValueOnce(mockResponse);
const { result } = renderHook(() => useInferenceEndpoints());
await act(async () => {
await Promise.resolve();
});
expect(mockCallApi).toHaveBeenCalledWith(
'GET /internal/observability_ai_assistant/kb/inference_endpoints',
{
signal: expect.any(AbortSignal),
}
);
expect(result.current.inferenceEndpoints).toEqual(mockResponse.endpoints);
expect(result.current.isLoading).toBe(false);
expect(result.current.error).toBeUndefined();
});
it('sets an error state on API errors', async () => {
const error = new Error('Something went wrong');
mockCallApi.mockRejectedValueOnce(error);
const { result } = renderHook(() => useInferenceEndpoints());
await act(async () => {
await Promise.resolve();
});
expect(result.current.inferenceEndpoints).toEqual([]);
expect(result.current.isLoading).toBe(false);
expect(result.current.error).toEqual(error);
});
it('ignores AbortError and does not set error state', async () => {
const abortError = new DOMException('Aborted', 'AbortError');
mockCallApi.mockRejectedValueOnce(abortError);
const { result } = renderHook(() => useInferenceEndpoints());
await act(async () => {
await Promise.resolve();
});
expect(result.current.inferenceEndpoints).toEqual([]);
expect(result.current.isLoading).toBe(false);
expect(result.current.error).toBeUndefined();
});
});

View file

@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { useEffect, useState, useMemo, useCallback } from 'react';
import type { APIReturnType } from '@kbn/observability-ai-assistant-plugin/public';
import { useAIAssistantAppService } from './use_ai_assistant_app_service';
export function useInferenceEndpoints() {
const service = useAIAssistantAppService();
const [inferenceEndpoints, setInferenceEndpoints] = useState<
APIReturnType<'GET /internal/observability_ai_assistant/kb/inference_endpoints'>['endpoints']
>([]);
const [isLoading, setIsLoading] = useState(true);
const [error, setError] = useState<Error | undefined>(undefined);
const controller = useMemo(() => new AbortController(), []);
const fetchInferenceEndpoints = useCallback(async () => {
setIsLoading(true);
try {
const res = await service.callApi(
'GET /internal/observability_ai_assistant/kb/inference_endpoints',
{
signal: controller.signal,
}
);
setInferenceEndpoints(res.endpoints);
setError(undefined);
} catch (err) {
if (err.name !== 'AbortError') {
setError(err as Error);
setInferenceEndpoints([]);
}
} finally {
setIsLoading(false);
}
}, [controller, service]);
useEffect(() => {
fetchInferenceEndpoints();
return () => {
controller.abort();
};
}, [controller, fetchInferenceEndpoints]);
return { inferenceEndpoints, isLoading, error };
}

View file

@ -12,6 +12,9 @@ import { useAIAssistantAppService } from './use_ai_assistant_app_service';
jest.mock('./use_kibana');
jest.mock('./use_ai_assistant_app_service');
jest.mock('p-retry', () => {
return (fn: () => Promise<any>) => fn();
});
describe('useKnowledgeBase', () => {
const mockCallApi = jest.fn();
@ -73,7 +76,7 @@ describe('useKnowledgeBase', () => {
// Trigger setup
act(() => {
result.current.install();
result.current.install('.elser-2-elasticsearch');
});
// Verify that the install was called
@ -81,9 +84,30 @@ describe('useKnowledgeBase', () => {
expect(mockCallApi).toHaveBeenCalledWith(
'POST /internal/observability_ai_assistant/kb/setup',
{
params: {
query: {
inference_id: '.elser-2-elasticsearch',
},
},
signal: null,
}
);
});
});
it('shows an error toast on install failure', async () => {
const error = new Error('setup failed');
mockCallApi.mockResolvedValueOnce({ kbState: 'NOT_INSTALLED' }).mockRejectedValueOnce(error);
const { result } = renderHook(() => useKnowledgeBase());
await act(async () => {
await result.current.install('failing-id');
});
expect(mockAddError).toHaveBeenCalledWith(expect.any(Error), {
title: expect.any(String),
});
});
});

View file

@ -20,7 +20,9 @@ export interface UseKnowledgeBaseResult {
status: AbortableAsyncState<APIReturnType<'GET /internal/observability_ai_assistant/kb/status'>>;
isInstalling: boolean;
isPolling: boolean;
install: () => Promise<void>;
install: (inferenceId: string) => Promise<void>;
warmupModel: (inferenceId: string) => Promise<void>;
isWarmingUpModel: boolean;
}
export function useKnowledgeBase(): UseKnowledgeBaseResult {
@ -35,45 +37,90 @@ export function useKnowledgeBase(): UseKnowledgeBaseResult {
);
const [isInstalling, setIsInstalling] = useState(false);
const [isWarmingUpModel, setIsWarmingUpModel] = useState(false);
// poll for status when installing, until install is complete and the KB is ready
const isPolling =
!!statusRequest.value?.endpoint && statusRequest.value?.kbState !== KnowledgeBaseState.READY;
(isInstalling || isWarmingUpModel) && statusRequest.value?.kbState !== KnowledgeBaseState.READY;
useEffect(() => {
if (isInstalling && !!statusRequest.value?.endpoint) {
// toggle installing state to false once KB is ready
if (isInstalling && statusRequest.value?.kbState === KnowledgeBaseState.READY) {
setIsInstalling(false);
}
}, [isInstalling, statusRequest]);
const install = useCallback(async () => {
setIsInstalling(true);
try {
// Retry the setup with a maximum of 5 attempts
await pRetry(
async () => {
await service.callApi('POST /internal/observability_ai_assistant/kb/setup', {
signal: null,
});
},
{
retries: 5,
}
);
if (ml.mlApi?.savedObjects.syncSavedObjects) {
await ml.mlApi.savedObjects.syncSavedObjects();
}
// Refresh status after installation
statusRequest.refresh();
} catch (error) {
notifications!.toasts.addError(error, {
title: i18n.translate('xpack.aiAssistant.errorSettingUpInferenceEndpoint', {
defaultMessage: 'Could not create inference endpoint',
}),
});
useEffect(() => {
// toggle warming up state to false once KB is ready
if (isWarmingUpModel && statusRequest.value?.kbState === KnowledgeBaseState.READY) {
setIsWarmingUpModel(false);
}
}, [ml, service, notifications, statusRequest]);
}, [isWarmingUpModel, statusRequest]);
// poll the status if isPolling (inference endpoint is created but deployment is not ready)
const install = useCallback(
async (inferenceId: string) => {
setIsInstalling(true);
try {
// Retry the setup with a maximum of 5 attempts
await pRetry(
async () => {
await service.callApi('POST /internal/observability_ai_assistant/kb/setup', {
params: {
query: {
inference_id: inferenceId,
},
},
signal: null,
});
},
{
retries: 5,
}
);
if (ml.mlApi?.savedObjects.syncSavedObjects) {
await ml.mlApi.savedObjects.syncSavedObjects();
}
// Refresh status after installation
statusRequest.refresh();
} catch (error) {
notifications!.toasts.addError(error, {
title: i18n.translate('xpack.aiAssistant.errorSettingUpKnowledgeBase', {
defaultMessage: 'Could not setup knowledge base',
}),
});
}
},
[ml, service, notifications, statusRequest]
);
const warmupModel = useCallback(
async (inferenceId: string) => {
setIsWarmingUpModel(true);
try {
await service.callApi('POST /internal/observability_ai_assistant/kb/warmup_model', {
params: {
query: {
inference_id: inferenceId,
},
},
signal: null,
});
// Refresh status after warming up model
statusRequest.refresh();
} catch (error) {
notifications!.toasts.addError(error, {
title: i18n.translate('xpack.aiAssistant.errorWarmingupModel', {
defaultMessage: 'Could not warm up knowledge base model',
}),
});
}
},
[service, notifications, statusRequest]
);
// poll the status if isPolling
useEffect(() => {
if (!isPolling) {
return;
@ -98,5 +145,7 @@ export function useKnowledgeBase(): UseKnowledgeBaseResult {
install,
isInstalling,
isPolling,
warmupModel,
isWarmingUpModel,
};
}

View file

@ -0,0 +1,166 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useState } from 'react';
import {
EuiButton,
EuiButtonEmpty,
EuiFlexGroup,
EuiFlexItem,
EuiPopover,
EuiSpacer,
EuiText,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import { UseKnowledgeBaseResult } from '../hooks';
import { WelcomeMessageKnowledgeBaseSetupErrorPanel } from './welcome_message_knowledge_base_setup_error_panel';
import { SelectModelAndInstallKnowledgeBase } from './select_model_and_install_knowledge_base';
import { SettingUpKnowledgeBase } from './setting_up_knowledge_base';
const WarmUpModel = ({
knowledgeBase,
pendingDeployment = false,
}: {
knowledgeBase: UseKnowledgeBaseResult;
pendingDeployment?: boolean;
}) => {
const currentInferenceId = knowledgeBase.status.value?.endpoint?.inference_id;
const handleWarmup = () => {
knowledgeBase.warmupModel(currentInferenceId!);
};
const label = (
<EuiText
color="subdued"
size="s"
data-test-subj="observabilityAiAssistantKnowledgeBaseModelPendingText"
>
{i18n.translate(
knowledgeBase.isWarmingUpModel
? 'xpack.aiAssistant.welcomeMessage.redeployingKnowledgeBaseTextLabel'
: pendingDeployment
? 'xpack.aiAssistant.welcomeMessage.knowledgeBaseStoppedTextLabel'
: 'xpack.aiAssistant.welcomeMessage.knowledgeBasePausedTextLabel',
{
defaultMessage: knowledgeBase.isWarmingUpModel
? 'Re-deploying knowledge base model...'
: pendingDeployment
? 'Your knowledge base model has been stopped'
: 'Knowledge base model paused due to inactivity.',
}
)}
</EuiText>
);
return (
<>
{label}
<EuiSpacer size="m" />
<EuiFlexGroup justifyContent="center">
<EuiFlexItem grow={false}>
<EuiButton
color="primary"
fill
isLoading={knowledgeBase.isWarmingUpModel}
data-test-subj="observabilityAiAssistantKnowledgeBaseReDeployModelButton"
onClick={handleWarmup}
>
{i18n.translate('xpack.aiAssistant.knowledgeBase.wakeUpKnowledgeBaseModel', {
defaultMessage: 'Re-deploy Model',
})}
</EuiButton>
</EuiFlexItem>
</EuiFlexGroup>
</>
);
};
const InspectKnowledgeBasePopover = ({
knowledgeBase,
}: {
knowledgeBase: UseKnowledgeBaseResult;
}) => {
const [isPopoverOpen, setIsPopoverOpen] = useState(false);
const handleInstall = async (inferenceId: string) => {
setIsPopoverOpen(false);
await knowledgeBase.install(inferenceId);
};
return knowledgeBase.status.value?.modelStats ? (
<EuiFlexItem grow={false}>
<EuiPopover
button={
<EuiButtonEmpty
data-test-subj="observabilityAiAssistantWelcomeMessageInspectErrorsButton"
iconType="inspect"
onClick={() => setIsPopoverOpen(!isPopoverOpen)}
>
{i18n.translate('xpack.aiAssistant.welcomeMessage.inspectErrorsButtonEmptyLabel', {
defaultMessage: 'Inspect',
})}
</EuiButtonEmpty>
}
isOpen={isPopoverOpen}
panelPaddingSize="none"
closePopover={() => setIsPopoverOpen(false)}
>
<WelcomeMessageKnowledgeBaseSetupErrorPanel
knowledgeBase={knowledgeBase}
onRetryInstall={handleInstall}
/>
</EuiPopover>
</EuiFlexItem>
) : null;
};
export const KnowledgeBaseInstallationStatusPanel = ({
knowledgeBase,
}: {
knowledgeBase: UseKnowledgeBaseResult;
}) => {
switch (knowledgeBase.status.value?.kbState) {
case KnowledgeBaseState.NOT_INSTALLED:
return (
<>
<EuiSpacer size="l" />
<EuiFlexItem grow={false}>
<SelectModelAndInstallKnowledgeBase
onInstall={knowledgeBase.install}
isInstalling={knowledgeBase.isInstalling}
/>
</EuiFlexItem>
</>
);
case KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT:
return <WarmUpModel knowledgeBase={knowledgeBase} pendingDeployment />;
case KnowledgeBaseState.DEPLOYING_MODEL:
return (
<>
<SettingUpKnowledgeBase />
<InspectKnowledgeBasePopover knowledgeBase={knowledgeBase} />
</>
);
case KnowledgeBaseState.MODEL_PENDING_ALLOCATION:
return <WarmUpModel knowledgeBase={knowledgeBase} />;
case KnowledgeBaseState.ERROR:
return (
<>
<EuiText color="subdued" size="s">
{i18n.translate('xpack.aiAssistant.welcomeMessage.SettingUpFailTextLabel', {
defaultMessage: `Knowledge Base setup failed. Check 'Inspect' for details.`,
})}
</EuiText>
<InspectKnowledgeBasePopover knowledgeBase={knowledgeBase} />
</>
);
default:
return null;
}
};

View file

@ -0,0 +1,70 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { render, screen, fireEvent, waitFor } from '@testing-library/react';
import type { ModelOptionsData } from '../utils/get_model_options_for_inference_endpoints';
import { SelectModelAndInstallKnowledgeBase } from './select_model_and_install_knowledge_base';
jest.mock('../hooks/use_inference_endpoints', () => ({
useInferenceEndpoints: () => ({
inferenceEndpoints: [{ inference_id: 'id1' }, { inference_id: 'id2' }],
isLoading: false,
}),
}));
jest.mock('../utils/get_model_options_for_inference_endpoints', () => ({
getModelOptionsForInferenceEndpoints: ({ endpoints }: { endpoints: any[] }): ModelOptionsData[] =>
endpoints.map((e, i) => ({
key: e.inference_id,
label: `Label${i + 1}`,
description: `Desc${i + 1}`,
})),
}));
const onInstall = jest.fn();
function renderComponent() {
return render(<SelectModelAndInstallKnowledgeBase onInstall={onInstall} isInstalling={false} />);
}
describe('SelectModelAndInstallKnowledgeBase', () => {
beforeEach(() => {
renderComponent();
});
it('renders heading, subtitle, and the dropdown with a default model selected', () => {
expect(screen.getByRole('heading', { level: 3 })).toHaveTextContent(
'Get started by setting up the Knowledge Base'
);
const learnMore = screen.getByRole('link', { name: /Learn more/i });
expect(learnMore).toHaveAttribute('href', expect.stringContaining('ml-nlp-built-in-models'));
expect(screen.getByText('Label1')).toBeInTheDocument();
});
it('calls onInstall with default id when the install button is clicked', () => {
const installBtn = screen.getByRole('button', { name: /Install Knowledge Base/i });
fireEvent.click(installBtn);
expect(onInstall).toHaveBeenCalledWith('id1');
});
it('allows changing selection and installing the KB with the inference_id for the new model', async () => {
const defaultSelection = screen.getByText('Label1');
fireEvent.click(defaultSelection);
const nextSelection = screen.getByText('Label2');
await waitFor(() => nextSelection);
fireEvent.click(nextSelection);
expect(nextSelection).toBeInTheDocument();
fireEvent.click(screen.getByRole('button', { name: /Install Knowledge Base/i }));
expect(onInstall).toHaveBeenCalledWith('id2');
});
});

View file

@ -0,0 +1,179 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useEffect, useState } from 'react';
import { css } from '@emotion/css';
import {
EuiButton,
EuiFlexGroup,
EuiFlexItem,
EuiIcon,
EuiLink,
EuiSpacer,
EuiSuperSelect,
EuiText,
euiCanAnimate,
useEuiTheme,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { isHttpFetchError } from '@kbn/core-http-browser';
import { useInferenceEndpoints } from '../hooks/use_inference_endpoints';
import {
ModelOptionsData,
getModelOptionsForInferenceEndpoints,
} from '../utils/get_model_options_for_inference_endpoints';
import { fadeInAnimation } from '../chat/welcome_message_connectors';
interface SelectModelAndInstallKnowledgeBaseProps {
onInstall: (inferenceId: string) => Promise<void>;
isInstalling: boolean;
}
export function SelectModelAndInstallKnowledgeBase({
onInstall,
isInstalling,
}: SelectModelAndInstallKnowledgeBaseProps) {
const { euiTheme } = useEuiTheme();
const fadeInClassName = css`
${euiCanAnimate} {
animation: ${fadeInAnimation} ${euiTheme.animation.normal} ease-in-out;
}
`;
const [selectedInferenceId, setSelectedInferenceId] = useState<string>('');
const { inferenceEndpoints, isLoading: isLoadingEndpoints, error } = useInferenceEndpoints();
useEffect(() => {
if (!selectedInferenceId && inferenceEndpoints.length) {
setSelectedInferenceId(inferenceEndpoints[0].inference_id);
}
}, [inferenceEndpoints, selectedInferenceId]);
const handleInstall = () => {
if (selectedInferenceId) {
onInstall(selectedInferenceId);
}
};
const modelOptions: ModelOptionsData[] = getModelOptionsForInferenceEndpoints({
endpoints: inferenceEndpoints,
});
const superSelectOptions = modelOptions.map((option: ModelOptionsData) => ({
value: option.key,
inputDisplay: option.label,
dropdownDisplay: (
<div>
<strong>{option.label}</strong>
<EuiText size="xs" color="subdued" css={{ marginTop: 4 }}>
{option.description}
</EuiText>
</div>
),
}));
if (error) {
const isForbiddenError =
isHttpFetchError(error) && (error.body as { statusCode: number }).statusCode === 403;
return (
<div
className={fadeInClassName}
data-test-subj="observabilityAiAssistantInferenceEndpointsError"
>
<EuiFlexGroup direction="row" alignItems="center" justifyContent="center" gutterSize="xs">
<EuiFlexItem grow={false}>
<EuiIcon type="alert" color="danger" />
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiText color="danger">
{isForbiddenError
? i18n.translate(
'xpack.aiAssistant.knowledgeBase.inferenceEndpointsForbiddenTextLabel',
{
defaultMessage: 'Required privileges to fetch available models are missing',
}
)
: i18n.translate(
'xpack.aiAssistant.knowledgeBase.inferenceEndpointsErrorTextLabel',
{
defaultMessage: 'Could not load models',
}
)}
</EuiText>
</EuiFlexItem>
</EuiFlexGroup>
</div>
);
}
return (
<>
<EuiText textAlign="center">
<h3>
{i18n.translate('xpack.aiAssistant.knowledgeBase.getStarted', {
defaultMessage: 'Get started by setting up the Knowledge Base',
})}
</h3>
</EuiText>
<EuiSpacer size="s" />
<EuiText size="s" color="subdued" textAlign="center">
{i18n.translate('xpack.aiAssistant.knowledgeBase.chooseModelSubtitle', {
defaultMessage: "Choose the default language model for the Assistant's responses.",
})}{' '}
<EuiLink
href="https://www.elastic.co/docs/explore-analyze/machine-learning/nlp/ml-nlp-built-in-models"
target="_blank"
>
{i18n.translate('xpack.aiAssistant.knowledgeBase.subtitleLearnMore', {
defaultMessage: 'Learn more',
})}
</EuiLink>
</EuiText>
<EuiSpacer size="l" />
<EuiFlexGroup justifyContent="center">
<EuiFlexItem grow={false} css={{ width: 320 }}>
<EuiSuperSelect
fullWidth
hasDividers
isLoading={isLoadingEndpoints}
options={superSelectOptions}
valueOfSelected={selectedInferenceId}
onChange={(value) => setSelectedInferenceId(value)}
disabled={isInstalling}
data-test-subj="observabilityAiAssistantKnowledgeBaseModelDropdown"
/>
</EuiFlexItem>
</EuiFlexGroup>
<EuiSpacer size="m" />
<EuiFlexGroup justifyContent="center">
<EuiFlexItem grow={false}>
<EuiButton
color="primary"
fill
isLoading={isInstalling}
iconType="importAction"
data-test-subj="observabilityAiAssistantWelcomeMessageSetUpKnowledgeBaseButton"
onClick={handleInstall}
>
{i18n.translate('xpack.aiAssistant.knowledgeBase.installButtonLabel', {
defaultMessage: 'Install Knowledge Base',
})}
</EuiButton>
</EuiFlexItem>
</EuiFlexGroup>
</>
);
}

View file

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React from 'react';
import { i18n } from '@kbn/i18n';
import { EuiButtonEmpty, EuiSpacer, EuiText } from '@elastic/eui';
export const SettingUpKnowledgeBase = () => (
<>
<EuiText color="subdued" size="s">
{i18n.translate('xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel', {
defaultMessage:
'We are setting up your knowledge base. This may take a few minutes. You can continue to use the Assistant while this process is underway.',
})}
</EuiText>
<EuiSpacer size="m" />
<EuiButtonEmpty
data-test-subj="observabilityAiAssistantWelcomeMessageSettingUpKnowledgeBaseText"
isLoading
>
{i18n.translate('xpack.aiAssistant.welcomeMessage.div.settingUpKnowledgeBaseLabel', {
defaultMessage: 'Setting up Knowledge Base',
})}
</EuiButtonEmpty>
</>
);

View file

@ -8,38 +8,74 @@
import React from 'react';
import { act, render, screen } from '@testing-library/react';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import { WelcomeMessageKnowledgeBase } from './welcome_message_knowledge_base';
import type { UseKnowledgeBaseResult } from '../hooks/use_knowledge_base';
jest.mock('../hooks/use_inference_endpoints', () => ({
useInferenceEndpoints: () => ({
inferenceEndpoints: [{ inference_id: 'id1' }, { inference_id: 'id2' }],
isLoading: false,
}),
}));
function createMockKnowledgeBase(
partial: Partial<UseKnowledgeBaseResult> = {}
): UseKnowledgeBaseResult {
return {
isInstalling: partial.isInstalling ?? false,
isPolling: partial.isPolling ?? false,
install: partial.install ?? (async (_id: string) => {}),
warmupModel: partial.warmupModel ?? (async (_id: string) => {}),
isWarmingUpModel: partial.isWarmingUpModel ?? false,
status: partial.status ?? {
value: {
enabled: true,
errorMessage: undefined,
kbState: KnowledgeBaseState.NOT_INSTALLED,
concreteWriteIndex: undefined,
currentInferenceId: undefined,
isReIndexing: false,
},
loading: false,
error: undefined,
refresh: jest.fn(),
},
};
}
function renderComponent(kb: UseKnowledgeBaseResult) {
return render(<WelcomeMessageKnowledgeBase knowledgeBase={kb} />);
}
describe('WelcomeMessageKnowledgeBase', () => {
afterEach(() => {
jest.clearAllMocks();
});
function createMockKnowledgeBase(
partial: Partial<UseKnowledgeBaseResult> = {}
): UseKnowledgeBaseResult {
return {
isInstalling: partial.isInstalling ?? false,
isPolling: partial.isPolling ?? false,
install: partial.install ?? jest.fn(),
status: partial.status ?? {
it(`renders the "not set up" state if server returns errorMessage (no model exists) but user hasn't started installing`, async () => {
const kb = createMockKnowledgeBase({
isInstalling: false,
install: jest.fn(async (_id: string) => {}),
status: {
value: {
enabled: true,
errorMessage: undefined,
kbState: KnowledgeBaseState.NOT_INSTALLED,
errorMessage: 'no model',
concreteWriteIndex: undefined,
currentInferenceId: undefined,
isReIndexing: false,
},
loading: false,
error: undefined,
refresh: jest.fn(),
},
};
}
});
function renderComponent(kb: UseKnowledgeBaseResult) {
return render(<WelcomeMessageKnowledgeBase knowledgeBase={kb} />);
}
renderComponent(kb);
expect(screen.getByText(/Get started by setting up the Knowledge Base/i)).toBeInTheDocument();
expect(screen.getByText(/Install Knowledge Base/i)).toBeInTheDocument();
expect(screen.queryByText(/Inspect/i)).toBeNull();
});
it('renders install message if isInstalling', () => {
const kb = createMockKnowledgeBase({
@ -48,7 +84,10 @@ describe('WelcomeMessageKnowledgeBase', () => {
value: {
enabled: true,
endpoint: { inference_id: 'inference_id' },
kbState: KnowledgeBaseState.PENDING_MODEL_DEPLOYMENT,
kbState: KnowledgeBaseState.DEPLOYING_MODEL,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
},
loading: false,
refresh: jest.fn(),
@ -66,7 +105,13 @@ describe('WelcomeMessageKnowledgeBase', () => {
isInstalling: true,
isPolling: true,
status: {
value: { enabled: true, kbState: KnowledgeBaseState.NOT_INSTALLED },
value: {
enabled: true,
kbState: KnowledgeBaseState.NOT_INSTALLED,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
},
loading: false,
refresh: jest.fn(),
},
@ -87,6 +132,9 @@ describe('WelcomeMessageKnowledgeBase', () => {
enabled: true,
endpoint: { inference_id: 'inference_id' },
kbState: KnowledgeBaseState.READY,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
},
loading: false,
refresh: jest.fn(),
@ -110,6 +158,9 @@ describe('WelcomeMessageKnowledgeBase', () => {
enabled: true,
endpoint: { inference_id: 'inference_id' },
kbState: KnowledgeBaseState.DEPLOYING_MODEL,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
modelStats: {
deployment_stats: {
state: 'starting',
@ -132,7 +183,7 @@ describe('WelcomeMessageKnowledgeBase', () => {
expect(screen.getByText(/Inspect/i)).toBeInTheDocument();
});
it('renders "Base setup failed" with inspect issues', () => {
it('renders "Knowledge Base setup failed" with inspect issues', () => {
const kb = createMockKnowledgeBase({
isInstalling: false,
isPolling: true,
@ -141,6 +192,9 @@ describe('WelcomeMessageKnowledgeBase', () => {
enabled: true,
endpoint: { inference_id: 'inference_id' },
kbState: KnowledgeBaseState.ERROR,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
modelStats: {
deployment_stats: {
reason: 'model deployment failed',
@ -163,46 +217,23 @@ describe('WelcomeMessageKnowledgeBase', () => {
expect(
screen.getByText(/Knowledge Base setup failed. Check 'Inspect' for details./i)
).toBeInTheDocument();
// Because we have an reason error, we also see "Inspect" button
expect(screen.getAllByText(/Inspect/i)).toHaveLength(2);
});
it('renders "not set up" if server returns errorMessage (no endpoint exists) but user hasnt started installing', () => {
// this happens when no endpoint exists because user has never installed
// which can happen for on prem users with preconfigured connector where /setup is not
// automatically called
it('renders "We are setting up your knowledge base" if model is not ready but endpoint exists', () => {
const kb = createMockKnowledgeBase({
isInstalling: false,
status: {
value: {
enabled: true,
kbState: KnowledgeBaseState.NOT_INSTALLED,
errorMessage: 'no endpoint',
},
loading: false,
refresh: jest.fn(),
},
});
renderComponent(kb);
expect(screen.getByText(/Your Knowledge base hasn't been set up/i)).toBeInTheDocument();
expect(screen.getByText(/Install Knowledge base/i)).toBeInTheDocument();
expect(screen.queryByText(/Inspect/i)).toBeNull();
});
it('renders "not set up" if model is not ready (but no errorMessage because endpoint exists)', () => {
// This could happen if the user manually stopped the model in ML,
// so we have no install error, but ready = false
const kb = createMockKnowledgeBase({
isInstalling: false,
isPolling: true,
status: {
value: {
enabled: true,
endpoint: { inference_id: 'inference_id' },
kbState: KnowledgeBaseState.DEPLOYING_MODEL,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
modelStats: {
deployment_stats: {
reason: 'model deployment failed',
reason: 'model deployment paused',
deployment_id: 'deployment_id',
model_id: 'model_id',
nodes: [],
@ -211,10 +242,8 @@ describe('WelcomeMessageKnowledgeBase', () => {
start_time: 0,
},
},
enabled: true,
},
loading: false,
error: undefined,
refresh: jest.fn(),
},
});
@ -231,7 +260,9 @@ describe('WelcomeMessageKnowledgeBase', () => {
kbState: KnowledgeBaseState.READY,
endpoint: { inference_id: 'inference_id' },
enabled: true,
errorMessage: undefined,
concreteWriteIndex: 'my-index',
currentInferenceId: 'inference_id',
isReIndexing: false,
},
loading: false,
error: undefined,
@ -241,7 +272,7 @@ describe('WelcomeMessageKnowledgeBase', () => {
renderComponent(kb);
expect(screen.queryByText(/We are setting up your knowledge base/i)).toBeNull();
expect(screen.queryByText(/Your Knowledge base hasn't been set up/i)).toBeNull();
expect(screen.queryByText(/Get started by setting up the Knowledge Base/i)).toBeNull();
expect(screen.queryByText(/Knowledge base successfully installed/i)).toBeNull();
});
});

View file

@ -0,0 +1,60 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { useEffect, useState } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiFlexGroup, EuiFlexItem, EuiIcon, EuiText } from '@elastic/eui';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import usePrevious from 'react-use/lib/usePrevious';
import { UseKnowledgeBaseResult } from '../hooks';
import { KnowledgeBaseInstallationStatusPanel } from './knowledge_base_installation_status_panel';
import { SettingUpKnowledgeBase } from './setting_up_knowledge_base';
export function WelcomeMessageKnowledgeBase({
knowledgeBase,
}: {
knowledgeBase: UseKnowledgeBaseResult;
}) {
const prevIsInstalling = usePrevious(knowledgeBase.isInstalling || knowledgeBase.isPolling);
const [showSuccessBanner, setShowSuccessBanner] = useState(false);
useEffect(() => {
if (prevIsInstalling) {
setShowSuccessBanner(true);
}
}, [knowledgeBase.isInstalling, prevIsInstalling]);
if (knowledgeBase.isInstalling) {
return <SettingUpKnowledgeBase />;
}
if (knowledgeBase.status.value?.kbState === KnowledgeBaseState.READY) {
return showSuccessBanner ? (
<div>
<EuiFlexGroup alignItems="center" gutterSize="s" justifyContent="center">
<EuiFlexItem grow={false}>
<EuiIcon type="checkInCircleFilled" color="success" />
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiText
color="subdued"
size="s"
data-test-subj="observabilityAiAssistantKnowledgeBaseInstalled"
>
{i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBaseSuccessfullyInstalledLabel',
{ defaultMessage: 'Knowledge base successfully installed' }
)}
</EuiText>
</EuiFlexItem>
</EuiFlexGroup>
</div>
) : null;
}
return <KnowledgeBaseInstallationStatusPanel knowledgeBase={knowledgeBase} />;
}

View file

@ -33,7 +33,7 @@ export function WelcomeMessageKnowledgeBaseSetupErrorPanel({
onRetryInstall,
}: {
knowledgeBase: UseKnowledgeBaseResult;
onRetryInstall: () => void;
onRetryInstall: (inferenceId: string) => void;
}) {
const { http } = useKibana().services;
@ -42,6 +42,7 @@ export function WelcomeMessageKnowledgeBaseSetupErrorPanel({
const deploymentReason = knowledgeBase.status.value?.modelStats?.deployment_stats?.reason;
const allocationState =
knowledgeBase.status.value?.modelStats?.deployment_stats?.allocation_status?.state;
const inferenceId = knowledgeBase.status.value?.modelStats?.deployment_stats?.deployment_id;
return (
<div
@ -130,7 +131,7 @@ export function WelcomeMessageKnowledgeBaseSetupErrorPanel({
retryInstallingLink: (
<EuiLink
data-test-subj="observabilityAiAssistantWelcomeMessageKnowledgeBaseSetupErrorPanelRetryInstallingLink"
onClick={onRetryInstall}
onClick={() => onRetryInstall(inferenceId!)} // TODO: check behaviour in error state
>
{i18n.translate(
'xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel',

View file

@ -0,0 +1,40 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import {
e5SmallDescription,
e5SmallTitle,
elserDescription,
elserTitle,
getModelOptionsForInferenceEndpoints,
ModelOptionsData,
} from './get_model_options_for_inference_endpoints';
import type { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
describe('getModelOptionsForInferenceEndpoints', () => {
it('maps known inference endpoints to user-friendly titles and descriptions', () => {
const endpoints = [
{ inference_id: '.elser-2-elasticsearch' },
{ inference_id: '.multilingual-e5-small-elasticsearch' },
] as InferenceAPIConfigResponse[];
const options: ModelOptionsData[] = getModelOptionsForInferenceEndpoints({ endpoints });
expect(options).toEqual([
{
key: '.elser-2-elasticsearch',
label: elserTitle,
description: elserDescription,
},
{
key: '.multilingual-e5-small-elasticsearch',
label: e5SmallTitle,
description: e5SmallDescription,
},
]);
});
});

View file

@ -0,0 +1,107 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { i18n } from '@kbn/i18n';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
export interface ModelOptionsData {
key: string;
label: string;
description: string;
}
export const elserTitle = i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.elserTitle',
{
defaultMessage: 'ELSER v2 (English-only)',
}
);
export const elserDescription = i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.elserDescription',
{
defaultMessage:
'Focus on query meaning, not just keyword matching, using learned associations between terms. It delivers more relevant, context-aware results and works out of the box with no need for deep machine learning expertise.',
}
);
export const e5SmallTitle = i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5smallTitle',
{
defaultMessage: 'E5-small (multilingual)',
}
);
export const e5SmallDescription = i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5smallDescription',
{
defaultMessage:
'E5 is an NLP model by Elastic designed to enhance multilingual semantic search by focusing on query context rather than keywords. E5-small is a cross-platform version compatible with different hardware configurations.',
}
);
const e5LargeTitle = i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5largeTitle',
{
defaultMessage: 'E5-large (multilingual)',
}
);
const e5LargeDescription = i18n.translate(
'xpack.aiAssistant.welcomeMessage.knowledgeBase.model.e5largeDescription',
{
defaultMessage:
'E5 is an NLP model by Elastic designed to enhance multilingual semantic search by focusing on query context rather than keywords. E5-large is an optimized version for Intel® silicon.',
}
);
const PRECONFIGURED_INFERENCE_ENDPOINT_METADATA: Record<
string,
{ title: string; description: string }
> = {
'.elser-2-elasticsearch': {
title: elserTitle,
description: elserDescription,
},
'.elser-v2-elastic': {
title: elserTitle,
description: elserDescription,
},
'.multilingual-e5-small-elasticsearch': {
title: e5SmallTitle,
description: e5SmallDescription,
},
'.multilingual-e5-large-elasticsearch': {
title: e5LargeTitle,
description: e5LargeDescription,
},
};
export const getModelOptionsForInferenceEndpoints = ({
endpoints,
}: {
endpoints: InferenceAPIConfigResponse[];
}): ModelOptionsData[] => {
// TODO: add logic to show the EIS models if EIS is enabled, if not show the other models
const preConfiguredEndpoints = endpoints
.map((endpoint) => {
const meta = PRECONFIGURED_INFERENCE_ENDPOINT_METADATA[endpoint.inference_id];
if (!meta) {
return undefined;
}
return {
key: endpoint.inference_id,
label: meta.title,
description: meta.description,
};
})
.filter(Boolean) as ModelOptionsData[];
return preConfiguredEndpoints;
};

View file

@ -42,5 +42,6 @@
"@kbn/ai-assistant-icon",
"@kbn/datemath",
"@kbn/security-plugin-types-common",
"@kbn/ml-trained-models-utils",
]
}

View file

@ -6,6 +6,7 @@
*/
import { useQuery } from '@tanstack/react-query';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import { REACT_QUERY_KEYS } from '../constants';
import { useKibana } from './use_kibana';
@ -13,10 +14,12 @@ export function useGetKnowledgeBaseEntries({
query,
sortBy,
sortDirection,
kbState,
}: {
query: string;
sortBy: string;
sortDirection: 'asc' | 'desc';
kbState?: KnowledgeBaseState;
}) {
const { observabilityAIAssistant } = useKibana().services;
@ -40,6 +43,7 @@ export function useGetKnowledgeBaseEntries({
},
});
},
enabled: kbState === KnowledgeBaseState.READY,
keepPreviousData: true,
refetchOnWindowFocus: false,
});

View file

@ -7,6 +7,7 @@
import React from 'react';
import { fireEvent } from '@testing-library/react';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
import { useGenAIConnectors, useKnowledgeBase } from '@kbn/ai-assistant/src/hooks';
import { render } from '../../helpers/test_helper';
import { useCreateKnowledgeBaseEntry } from '../../hooks/use_create_knowledge_base_entry';
@ -14,13 +15,18 @@ import { useDeleteKnowledgeBaseEntry } from '../../hooks/use_delete_knowledge_ba
import { useGetKnowledgeBaseEntries } from '../../hooks/use_get_knowledge_base_entries';
import { useImportKnowledgeBaseEntries } from '../../hooks/use_import_knowledge_base_entries';
import { KnowledgeBaseTab } from './knowledge_base_tab';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/public';
jest.mock('../../hooks/use_get_knowledge_base_entries');
jest.mock('../../hooks/use_create_knowledge_base_entry');
jest.mock('../../hooks/use_import_knowledge_base_entries');
jest.mock('../../hooks/use_delete_knowledge_base_entry');
jest.mock('@kbn/ai-assistant/src/hooks');
jest.mock('@kbn/ai-assistant/src/hooks/use_inference_endpoints', () => ({
useInferenceEndpoints: () => ({
inferenceEndpoints: [{ inference_id: 'id1' }, { inference_id: 'id2' }],
isLoading: false,
}),
}));
const useGetKnowledgeBaseEntriesMock = useGetKnowledgeBaseEntries as jest.Mock;
const useCreateKnowledgeBaseEntryMock = useCreateKnowledgeBaseEntry as jest.Mock;
@ -111,6 +117,7 @@ describe('KnowledgeBaseTab', () => {
install: jest.fn(),
});
});
it('should render a table', () => {
const { getByTestId } = render(<KnowledgeBaseTab />);
expect(getByTestId('knowledgeBaseTable')).toBeInTheDocument();

View file

@ -21,19 +21,23 @@ import {
EuiFlexGroup,
EuiFlexItem,
EuiIcon,
EuiLink,
EuiLoadingSpinner,
EuiPanel,
EuiPopover,
EuiScreenReaderOnly,
EuiSpacer,
EuiText,
useEuiTheme,
} from '@elastic/eui';
import { WelcomeMessageKnowledgeBase } from '@kbn/ai-assistant/src/chat/welcome_message_knowledge_base';
import { css } from '@emotion/css';
import {
KnowledgeBaseEntry,
KnowledgeBaseState,
} from '@kbn/observability-ai-assistant-plugin/public';
import { useKnowledgeBase } from '@kbn/ai-assistant/src/hooks';
import { AssistantBeacon } from '@kbn/ai-assistant-icon';
import { KnowledgeBaseInstallationStatusPanel } from '@kbn/ai-assistant/src/knowledge_base/knowledge_base_installation_status_panel';
import { SettingUpKnowledgeBase } from '@kbn/ai-assistant/src/knowledge_base/setting_up_knowledge_base';
import { useGetKnowledgeBaseEntries } from '../../hooks/use_get_knowledge_base_entries';
import { categorizeEntries, KnowledgeBaseEntryCategory } from '../../helpers/categorize_entries';
import { KnowledgeBaseEditManualEntryFlyout } from './knowledge_base_edit_manual_entry_flyout';
@ -50,11 +54,16 @@ const centerMaxWidthClassName = css`
text-align: center;
`;
const panelClassname = css`
width: 100%;
`;
export function KnowledgeBaseTab() {
const { uiSettings } = useKibana().services;
const dateFormat = uiSettings.get('dateFormat');
const knowledgeBase = useKnowledgeBase();
const { euiTheme } = useEuiTheme();
const columns: Array<EuiBasicTableColumn<KnowledgeBaseEntryCategory>> = [
{
@ -211,7 +220,13 @@ export function KnowledgeBaseTab() {
entries = [],
isLoading,
refetch,
} = useGetKnowledgeBaseEntries({ query, sortBy, sortDirection });
} = useGetKnowledgeBaseEntries({
query,
sortBy,
sortDirection,
kbState: knowledgeBase.status.value?.kbState,
});
const categorizedEntries = categorizeEntries({ entries });
const handleChangeSort = ({ sort }: Criteria<KnowledgeBaseEntryCategory>) => {
@ -226,7 +241,7 @@ export function KnowledgeBaseTab() {
setQuery(e?.currentTarget.value || '');
};
if (knowledgeBase.status.loading) {
if (knowledgeBase.status.loading && !knowledgeBase.isInstalling) {
return (
<EuiFlexGroup alignItems="center" direction="column">
<EuiFlexItem grow>
@ -236,183 +251,211 @@ export function KnowledgeBaseTab() {
);
}
return knowledgeBase.status.value?.kbState === KnowledgeBaseState.READY ? (
<>
<EuiFlexGroup direction="column">
<EuiFlexItem grow={false}>
<EuiFlexGroup gutterSize="s">
<EuiFlexItem grow>
<EuiFieldSearch
data-test-subj="knowledgeBaseTabFieldSearch"
fullWidth
placeholder={i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.euiFieldSearch.searchThisLabel',
{ defaultMessage: 'Search for an entry' }
)}
value={query}
onChange={handleChangeQuery}
isClearable
aria-label={i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.euiFieldSearch.searchEntriesLabel',
{ defaultMessage: 'Search entries' }
)}
/>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiButton
data-test-subj="knowledgeBaseTabReloadButton"
color="success"
iconType="refresh"
onClick={() => refetch()}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.reloadButtonLabel',
{ defaultMessage: 'Reload' }
)}
</EuiButton>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiButton
data-test-subj="observabilityAiAssistantManagementKnowledgeBaseTabEditInstructionsButton"
color="text"
onClick={() => setIsEditUserInstructionFlyoutOpen(true)}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.editInstructionsButtonLabel',
{ defaultMessage: 'Edit User-specific Prompt' }
)}
</EuiButton>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiPopover
isOpen={isNewEntryPopoverOpen}
closePopover={() => setIsNewEntryPopoverOpen(false)}
button={
<EuiButton
fill
data-test-subj="knowledgeBaseNewEntryButton"
iconSide="right"
iconType="arrowDown"
onClick={() => setIsNewEntryPopoverOpen((prevValue) => !prevValue)}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.newEntryButtonLabel',
{
defaultMessage: 'New entry',
}
)}
</EuiButton>
}
>
<EuiContextMenuPanel
size="s"
items={[
<EuiContextMenuItem
key="singleEntry"
icon="document"
data-test-subj="knowledgeBaseSingleEntryContextMenuItem"
onClick={() => {
setIsNewEntryPopoverOpen(false);
setNewEntryFlyoutType('singleEntry');
}}
size="s"
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.singleEntryContextMenuItemLabel',
{ defaultMessage: 'Single entry' }
)}
</EuiContextMenuItem>,
<EuiContextMenuItem
key="bulkImport"
icon="documents"
data-test-subj="knowledgeBaseBulkImportContextMenuItem"
onClick={() => {
setIsNewEntryPopoverOpen(false);
setNewEntryFlyoutType('bulkImport');
}}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.bulkImportContextMenuItemLabel',
{ defaultMessage: 'Bulk import' }
)}
</EuiContextMenuItem>,
]}
if (knowledgeBase.status.value?.kbState === KnowledgeBaseState.READY) {
return (
<>
<EuiFlexGroup direction="column">
<EuiFlexItem grow={false}>
<EuiFlexGroup gutterSize="s">
<EuiFlexItem grow>
<EuiFieldSearch
data-test-subj="knowledgeBaseTabFieldSearch"
fullWidth
placeholder={i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.euiFieldSearch.searchThisLabel',
{ defaultMessage: 'Search for an entry' }
)}
value={query}
onChange={handleChangeQuery}
isClearable
aria-label={i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.euiFieldSearch.searchEntriesLabel',
{ defaultMessage: 'Search entries' }
)}
/>
</EuiPopover>
</EuiFlexItem>
</EuiFlexGroup>
</EuiFlexItem>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiBasicTable<KnowledgeBaseEntryCategory>
data-test-subj="knowledgeBaseTable"
columns={columns}
items={categorizedEntries}
loading={isLoading}
sorting={{
sort: {
field: sortBy,
direction: sortDirection,
},
}}
rowProps={(row) => ({
onClick: () => setSelectedCategory(row),
})}
onChange={handleChangeSort}
<EuiFlexItem grow={false}>
<EuiButton
data-test-subj="knowledgeBaseTabReloadButton"
color="success"
iconType="refresh"
onClick={() => refetch()}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.reloadButtonLabel',
{ defaultMessage: 'Reload' }
)}
</EuiButton>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiButton
data-test-subj="observabilityAiAssistantManagementKnowledgeBaseTabEditInstructionsButton"
color="text"
onClick={() => setIsEditUserInstructionFlyoutOpen(true)}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.editInstructionsButtonLabel',
{ defaultMessage: 'Edit User-specific Prompt' }
)}
</EuiButton>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiPopover
isOpen={isNewEntryPopoverOpen}
closePopover={() => setIsNewEntryPopoverOpen(false)}
button={
<EuiButton
fill
data-test-subj="knowledgeBaseNewEntryButton"
iconSide="right"
iconType="arrowDown"
onClick={() => setIsNewEntryPopoverOpen((prevValue) => !prevValue)}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.newEntryButtonLabel',
{
defaultMessage: 'New entry',
}
)}
</EuiButton>
}
>
<EuiContextMenuPanel
size="s"
items={[
<EuiContextMenuItem
key="singleEntry"
icon="document"
data-test-subj="knowledgeBaseSingleEntryContextMenuItem"
onClick={() => {
setIsNewEntryPopoverOpen(false);
setNewEntryFlyoutType('singleEntry');
}}
size="s"
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.singleEntryContextMenuItemLabel',
{ defaultMessage: 'Single entry' }
)}
</EuiContextMenuItem>,
<EuiContextMenuItem
key="bulkImport"
icon="documents"
data-test-subj="knowledgeBaseBulkImportContextMenuItem"
onClick={() => {
setIsNewEntryPopoverOpen(false);
setNewEntryFlyoutType('bulkImport');
}}
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.bulkImportContextMenuItemLabel',
{ defaultMessage: 'Bulk import' }
)}
</EuiContextMenuItem>,
]}
/>
</EuiPopover>
</EuiFlexItem>
</EuiFlexGroup>
</EuiFlexItem>
<EuiFlexItem grow={false}>
<EuiBasicTable<KnowledgeBaseEntryCategory>
data-test-subj="knowledgeBaseTable"
columns={columns}
items={categorizedEntries}
loading={isLoading}
sorting={{
sort: {
field: sortBy,
direction: sortDirection,
},
}}
rowProps={(row) => ({
onClick: () => setSelectedCategory(row),
})}
onChange={handleChangeSort}
/>
</EuiFlexItem>
</EuiFlexGroup>
{isEditUserInstructionFlyoutOpen ? (
<KnowledgeBaseEditUserInstructionFlyout
onClose={() => setIsEditUserInstructionFlyoutOpen(false)}
/>
</EuiFlexItem>
</EuiFlexGroup>
) : null}
{isEditUserInstructionFlyoutOpen ? (
<KnowledgeBaseEditUserInstructionFlyout
onClose={() => setIsEditUserInstructionFlyoutOpen(false)}
/>
) : null}
{newEntryFlyoutType === 'singleEntry' ? (
<KnowledgeBaseEditManualEntryFlyout onClose={() => setNewEntryFlyoutType(undefined)} />
) : null}
{newEntryFlyoutType === 'singleEntry' ? (
<KnowledgeBaseEditManualEntryFlyout onClose={() => setNewEntryFlyoutType(undefined)} />
) : null}
{newEntryFlyoutType === 'bulkImport' ? (
<KnowledgeBaseBulkImportFlyout onClose={() => setNewEntryFlyoutType(undefined)} />
) : null}
{newEntryFlyoutType === 'bulkImport' ? (
<KnowledgeBaseBulkImportFlyout onClose={() => setNewEntryFlyoutType(undefined)} />
) : null}
{selectedCategory ? (
selectedCategory.entries.length === 1 &&
(selectedCategory.entries[0].role === 'user_entry' ||
selectedCategory.entries[0].role === 'assistant_summarization') ? (
<KnowledgeBaseEditManualEntryFlyout
entry={selectedCategory.entries[0]}
onClose={() => {
setSelectedCategory(undefined);
refetch();
}}
/>
) : (
<KnowledgeBaseCategoryFlyout
category={selectedCategory}
onClose={() => setSelectedCategory(undefined)}
/>
)
) : null}
</>
);
}
{selectedCategory ? (
selectedCategory.entries.length === 1 &&
(selectedCategory.entries[0].role === 'user_entry' ||
selectedCategory.entries[0].role === 'assistant_summarization') ? (
<KnowledgeBaseEditManualEntryFlyout
entry={selectedCategory.entries[0]}
onClose={() => {
setSelectedCategory(undefined);
refetch();
}}
/>
) : (
<KnowledgeBaseCategoryFlyout
category={selectedCategory}
onClose={() => setSelectedCategory(undefined)}
/>
)
) : null}
</>
) : (
return (
<EuiFlexGroup
alignItems="center"
direction="column"
gutterSize="none"
className={fullHeightClassName}
>
<EuiFlexItem grow={false}>
<AssistantBeacon backgroundColor="emptyShade" size="xl" />
</EuiFlexItem>
<EuiText
color="subdued"
css={css`
line-height: ${euiTheme.size.l};
`}
>
{i18n.translate('xpack.observabilityAiAssistantManagement.knowledgeBaseTab.description', {
defaultMessage:
'Knowledge Base is a feature that enables the AI Assistant to recall multiple knowledge sources: documents, organizational resources like runbooks, GitHub issues, and internal documentation. It improves response quality with added context for more tailored assistance. ',
})}
<EuiLink
href="https://www.elastic.co/docs/solutions/observability/observability-ai-assistant#obs-ai-add-data"
target="_blank"
>
{i18n.translate(
'xpack.observabilityAiAssistantManagement.knowledgeBaseTab.learnMoreLink',
{
defaultMessage: 'Learn More',
}
)}
</EuiLink>
</EuiText>
<EuiSpacer size="l" />
<EuiFlexItem grow className={centerMaxWidthClassName}>
<WelcomeMessageKnowledgeBase knowledgeBase={knowledgeBase} />
</EuiFlexItem>
<EuiPanel hasBorder paddingSize="xl" grow={false} className={panelClassname}>
<EuiFlexItem grow className={centerMaxWidthClassName}>
{knowledgeBase.isInstalling ? (
<SettingUpKnowledgeBase />
) : (
<KnowledgeBaseInstallationStatusPanel knowledgeBase={knowledgeBase} />
)}
</EuiFlexItem>
</EuiPanel>
</EuiFlexGroup>
);
}
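For orientation while reading the restructured component above, the change boils down to a small state-to-view mapping. A condensed sketch, not the component itself; the helper name and its return values are illustrative, only `KnowledgeBaseState` and the referenced panels come from the diff:

```
// Illustrative helper: which view the Knowledge Base tab renders for a given state.
import { KnowledgeBaseState } from '../../common'; // adjust to the actual export location

type KnowledgeBaseView = 'entriesTable' | 'settingUp' | 'installationStatus';

export function viewForState(
  kbState: KnowledgeBaseState | undefined,
  isInstalling: boolean
): KnowledgeBaseView {
  if (kbState === KnowledgeBaseState.READY) {
    // Search field, Reload / Edit instructions buttons, "New entry" popover and the entries table.
    return 'entriesTable';
  }
  // Otherwise: welcome message plus either <SettingUpKnowledgeBase /> or
  // <KnowledgeBaseInstallationStatusPanel />.
  return isInstalling ? 'settingUp' : 'installationStatus';
}
```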

View file

@ -27,7 +27,6 @@
"@kbn/ai-assistant",
"@kbn/core-plugins-server",
"@kbn/product-doc-base-plugin",
"@kbn/ai-assistant-icon",
"@kbn/ml-plugin",
"@kbn/management-settings-field-definition",
"@kbn/management-settings-types",

View file

@ -10123,7 +10123,6 @@
"xpack.aiAssistant.couldNotFindConversationTitle": "Conversation introuvable",
"xpack.aiAssistant.disclaimer.disclaimerLabel": "Ce chat est alimenté par une intégration avec votre fournisseur LLM. Il arrive que les grands modèles de langage (LLM) présentent comme correctes des informations incorrectes. Elastic prend en charge la configuration ainsi que la connexion au fournisseur LLM et à votre base de connaissances, mais n'est pas responsable des réponses fournies par le LLM.",
"xpack.aiAssistant.emptyConversationTitle": "Nouvelle conversation",
"xpack.aiAssistant.errorSettingUpInferenceEndpoint": "Impossible de créer le point de terminaison d'inférence",
"xpack.aiAssistant.errorUpdatingConversation": "Impossible de mettre à jour la conversation",
"xpack.aiAssistant.executedFunctionFailureEvent": "impossible d'exécuter la fonction {functionName}",
"xpack.aiAssistant.flyout.confirmDeleteButtonText": "Supprimer la conversation",
@ -10154,12 +10153,10 @@
"xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "Le modèle {modelId} n'est pas déployé",
"xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "L'état d'allocation de {modelId} est {allocationState}",
"xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "L'état de déploiement de {modelId} est {deploymentState}",
"xpack.aiAssistant.welcomeMessage.retryButtonLabel": "Installer la base de connaissances",
"xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "Modèles entraînés",
"xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "Nous configurons votre base de connaissances. Cette opération peut prendre quelques minutes. Vous pouvez continuer à utiliser l'Assistant lors de ce processus.",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "Impossible de charger les connecteurs",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "Vous n'avez pas les autorisations requises pour charger les connecteurs",
"xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel": "Votre base de connaissances n'a pas été configurée.",
"xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel": "Réessayer l'installation",
"xpack.aiops.actions.openChangePointInMlAppName": "Ouvrir dans AIOps Labs",
"xpack.aiops.analysis.analysisTypeDipFallbackInfoTitle": "Meilleurs éléments pour la plage temporelle de référence de base",
@ -18576,6 +18573,9 @@
"xpack.fleet.agentLogs.logLevelSelectText": "Niveau du log",
"xpack.fleet.agentLogs.oldAgentWarningTitle": "La vue Logs requiert Elastic Agent 7.11 ou une version ultérieure. Pour mettre à niveau un agent, accédez au menu Actions ou {downloadLink} une version plus récente.",
"xpack.fleet.agentLogs.openInDiscoverUiLinkText": "Ouvrir dans Discover",
"xpack.fleet.agentLogs.resetLogLevel.errorTitleText": "Erreur lors de la réinitialisation du niveau de logging de l'agent",
"xpack.fleet.agentLogs.resetLogLevel.successText": "Réinitialiser le niveau de logging de l'agent pour la politique",
"xpack.fleet.agentLogs.resetLogLevelLabelText": "Réinitialiser pour rétablir la politique",
"xpack.fleet.agentLogs.searchPlaceholderText": "Rechercher dans les logs…",
"xpack.fleet.agentLogs.selectLogLevel.errorTitleText": "Erreur lors de la mise à jour du niveau de logging de l'agent",
"xpack.fleet.agentLogs.selectLogLevel.successText": "Modification du niveau de logging de l'agent en \"{logLevel}\"",
@ -27750,6 +27750,7 @@
"xpack.ml.dataVisualizer.pageHeader": "Data Visualizer (Visualiseur de données)",
"xpack.ml.datavisualizer.selector.dataVisualizerDescription": "L'outil de Machine Learning Data Visualizer (Visualiseur de données) vous aide à comprendre vos données en analysant les indicateurs et les champs dans un fichier log ou un index Elasticsearch existant.",
"xpack.ml.datavisualizer.selector.dataVisualizerTitle": "Data Visualizer (Visualiseur de données)",
"xpack.ml.datavisualizer.selector.esqlTechnicalPreviewBadge.titleMsg": "Le visualiseur de données ES|QL est en version préliminaire technique.",
"xpack.ml.datavisualizer.selector.importDataTitle": "Visualiser les données à partir d'un fichier",
"xpack.ml.datavisualizer.selector.selectDataViewButtonLabel": "Sélectionner la vue de données",
"xpack.ml.datavisualizer.selector.selectDataViewTitle": "Visualiser les données à partir d'une vue de données",
@ -27796,6 +27797,7 @@
"xpack.ml.deepLink.overview": "Aperçu",
"xpack.ml.deepLink.resultExplorer": "Explorateur de résultats",
"xpack.ml.deepLink.singleMetricViewer": "Visionneuse dindicateur unique",
"xpack.ml.deepLink.suppliedConfigurations": "Configurations fournies",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanDelete.job": "Continuer pour supprimer {length, plural, one {# tâche} other {# tâches}}",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanDelete.model": "Continuer pour supprimer {length, plural, one {# modèle} other {# modèles}}",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanUnTagConfirm": "Retirer de l'espace en cours",
@ -32616,6 +32618,20 @@
"xpack.observabilityShared.bottomBarActions.unsavedChanges": "{unsavedChangesCount, plural, =0{0 modification non enregistrée} one {1 modification non enregistrée} other {# modifications non enregistrées}}",
"xpack.observabilityShared.breadcrumbs.observabilityLinkText": "Observabilité",
"xpack.observabilityShared.common.constants.grouping": "Observabilité",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemDescription": "Guides détaillés des fonctionnalités d'Elastic",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLabel": "Parcourir la documentation",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLinkARIALabel": "En savoir plus sur toutes les fonctionnalités d'Elastic",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLinkLabel": "En savoir plus",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemDescription": "Explorer notre environnement de démonstration en direct",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemLabel": "Environnement de démonstration",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemLinkLabel": "Explorer la démonstration",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemDescription": "Échanger à propos d'Elastic",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLabel": "Explorer le forum",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLinkARIALabel": "Ouvrir le forum de discussion sur Elastic",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLinkLabel": "Forum de discussion",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemDescription": "Obtenez de l'aide dans louverture dun cas",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemLabel": "Hub de support technique",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemLinkLabel": "Ouvrir le Hub de support technique",
"xpack.observabilityShared.featureFeedbackButton.tellUsWhatYouThinkLink": "Dites-nous ce que vous pensez !",
"xpack.observabilityShared.fieldValueSelection.apply": "Appliquer",
"xpack.observabilityShared.fieldValueSelection.apply.label": "Appliquer les filtres sélectionnés pour {label}",

View file

@ -10115,7 +10115,6 @@
"xpack.aiAssistant.couldNotFindConversationTitle": "会話が見つかりません",
"xpack.aiAssistant.disclaimer.disclaimerLabel": "この会話は、LLMプロバイダーとの統合によって提供されています。LLMは、正しくない情報を正しい情報であるかのように表示する場合があることが知られています。Elasticは、構成やLLMプロバイダーへの接続、お客様のナレッジベースへの接続はサポートしますが、LLMの応答については責任を負いません。",
"xpack.aiAssistant.emptyConversationTitle": "新しい会話",
"xpack.aiAssistant.errorSettingUpInferenceEndpoint": "推論エンドポイントを作成できませんでした",
"xpack.aiAssistant.errorUpdatingConversation": "会話を更新できませんでした",
"xpack.aiAssistant.executedFunctionFailureEvent": "関数{functionName}の実行に失敗しました",
"xpack.aiAssistant.flyout.confirmDeleteButtonText": "会話を削除",
@ -10146,12 +10145,10 @@
"xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "モデル\"{modelId}\"はデプロイされていません",
"xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "\"{modelId}\"の割り当て状態は{allocationState}です",
"xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "\"{modelId}\"のデプロイ状態は{deploymentState}です",
"xpack.aiAssistant.welcomeMessage.retryButtonLabel": "ナレッジベースをインストール",
"xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "学習済みモデル",
"xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "ナレッジベースをセットアップしています。これには数分かかる場合があります。この処理の実行中には、アシスタントを使用し続けることができます。",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "コネクターを読み込めませんでした",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "コネクターを取得するために必要な権限が不足しています",
"xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel": "ナレッジベースはセットアップされていません。",
"xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel": "インストールを再試行",
"xpack.aiops.actions.openChangePointInMlAppName": "AIOps Labsで開く",
"xpack.aiops.analysis.analysisTypeDipFallbackInfoTitle": "ベースライン時間範囲の上位のアイテム",
@ -18553,6 +18550,9 @@
"xpack.fleet.agentLogs.logLevelSelectText": "ログレベル",
"xpack.fleet.agentLogs.oldAgentWarningTitle": "ログの表示には、Elastic Agent 7.11以降が必要です。エージェントをアップグレードするには、[アクション]メニューに移動するか、新しいバージョンを{downloadLink}。",
"xpack.fleet.agentLogs.openInDiscoverUiLinkText": "Discoverで開く",
"xpack.fleet.agentLogs.resetLogLevel.errorTitleText": "エージェントログレベルのリセットエラー",
"xpack.fleet.agentLogs.resetLogLevel.successText": "エージェントログレベルをポリシーにリセット",
"xpack.fleet.agentLogs.resetLogLevelLabelText": "ポリシーにリセット",
"xpack.fleet.agentLogs.searchPlaceholderText": "ログを検索…",
"xpack.fleet.agentLogs.selectLogLevel.errorTitleText": "エージェントログレベルの更新エラー",
"xpack.fleet.agentLogs.selectLogLevel.successText": "エージェントログレベルを''{logLevel}''に変更しました",
@ -27775,6 +27775,7 @@
"xpack.ml.deepLink.overview": "概要",
"xpack.ml.deepLink.resultExplorer": "結果エクスプローラー",
"xpack.ml.deepLink.singleMetricViewer": "シングルメトリックビューアー",
"xpack.ml.deepLink.suppliedConfigurations": "提供された構成",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanDelete.job": "続行して、{length, plural, other {# 個のジョブ}}を削除します",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanDelete.model": "続行して、{length, plural, other {# 個のモデル}}を削除します",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanUnTagConfirm": "現在のスペースから削除",
@ -32597,6 +32598,20 @@
"xpack.observabilityShared.bottomBarActions.unsavedChanges": "{unsavedChangesCount, plural, other {# 未保存変更}}",
"xpack.observabilityShared.breadcrumbs.observabilityLinkText": "Observability",
"xpack.observabilityShared.common.constants.grouping": "Observability",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemDescription": "すべてのElastic機能に関する詳細なガイド",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLabel": "ドキュメントを参照",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLinkARIALabel": "すべてのElastic機能の詳細",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLinkLabel": "詳細",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemDescription": "Elasticのライブデモを見る",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemLabel": "デモ環境",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemLinkLabel": "デモの探索",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemDescription": "Elasticに関する意見を交換",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLabel": "フォーラムを探索",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLinkARIALabel": "Elasticディスカッションフォーラムを開く",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLinkLabel": "ディスカッションフォーラム",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemDescription": "ケースを作成してヘルプを依頼",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemLabel": "サポートハブ",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemLinkLabel": "サポートハブを開く",
"xpack.observabilityShared.featureFeedbackButton.tellUsWhatYouThinkLink": "ご意見をお聞かせください。",
"xpack.observabilityShared.fieldValueSelection.apply": "適用",
"xpack.observabilityShared.fieldValueSelection.apply.label": "{label}に選択したフィルターを適用",

View file

@ -10130,7 +10130,6 @@
"xpack.aiAssistant.couldNotFindConversationTitle": "未找到对话",
"xpack.aiAssistant.disclaimer.disclaimerLabel": "通过集成 LLM 提供商来支持此对话。众所周知LLM 有时会提供错误信息好像它是正确的。Elastic 支持配置并连接到 LLM 提供商和知识库,但不对 LLM 响应负责。",
"xpack.aiAssistant.emptyConversationTitle": "新对话",
"xpack.aiAssistant.errorSettingUpInferenceEndpoint": "无法创建推理终端",
"xpack.aiAssistant.errorUpdatingConversation": "无法更新对话",
"xpack.aiAssistant.executedFunctionFailureEvent": "无法执行函数 {functionName}",
"xpack.aiAssistant.flyout.confirmDeleteButtonText": "删除对话",
@ -10144,7 +10143,7 @@
"xpack.aiAssistant.incorrectLicense.title": "升级您的许可证",
"xpack.aiAssistant.initialSetupPanel.setupConnector.buttonLabel": "设置 GenAI 连接器",
"xpack.aiAssistant.initialSetupPanel.setupConnector.description2": "通过为您的 AI 提供商设置连接器,开始使用 Elastic AI 助手。此模型需要支持函数调用。使用 OpenAI 或 Azure 时,建议使用 GPT4。",
"xpack.aiAssistant.newChatButton": "新聊天",
"xpack.aiAssistant.newChatButton": "新对话",
"xpack.aiAssistant.prompt.placeholder": "向助手发送消息",
"xpack.aiAssistant.promptEditorNaturalLanguage.euiSelectable.selectAnOptionLabel": "选择选项",
"xpack.aiAssistant.settingsPage.goToConnectorsButtonLabel": "管理连接器",
@ -10161,12 +10160,10 @@
"xpack.aiAssistant.welcomeMessage.modelIsNotDeployedLabel": "未部署模型 {modelId}",
"xpack.aiAssistant.welcomeMessage.modelIsNotFullyAllocatedLabel": "{modelId} 的分配状态为 {allocationState}",
"xpack.aiAssistant.welcomeMessage.modelIsNotStartedLabel": "{modelId} 的部署状态为 {deploymentState}",
"xpack.aiAssistant.welcomeMessage.retryButtonLabel": "安装知识库",
"xpack.aiAssistant.welcomeMessage.trainedModelsLinkLabel": "已训练模型",
"xpack.aiAssistant.welcomeMessage.weAreSettingUpTextLabel": "我们正在设置您的知识库。这可能需要若干分钟。此进程处于运行状态时,您可以继续使用该助手。",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsErrorTextLabel": "无法加载连接器",
"xpack.aiAssistant.welcomeMessageConnectors.connectorsForbiddenTextLabel": "缺少获取连接器所需的权限",
"xpack.aiAssistant.welcomeMessageKnowledgeBase.yourKnowledgeBaseIsNotSetUpCorrectlyLabel": "尚未设置您的知识库。",
"xpack.aiAssistant.welcomeMessageKnowledgeBaseSetupErrorPanel.retryInstallingLinkLabel": "重试安装",
"xpack.aiops.actions.openChangePointInMlAppName": "在 Aiops 实验室中打开",
"xpack.aiops.analysis.analysisTypeDipFallbackInfoTitle": "基线时间范围的主要项目",
@ -18593,6 +18590,9 @@
"xpack.fleet.agentLogs.logLevelSelectText": "日志级别",
"xpack.fleet.agentLogs.oldAgentWarningTitle": "“日志”视图需要 Elastic Agent 7.11 或更高版本。要升级代理,请前往“操作”菜单或{downloadLink}更新的版本。",
"xpack.fleet.agentLogs.openInDiscoverUiLinkText": "在 Discover 中打开",
"xpack.fleet.agentLogs.resetLogLevel.errorTitleText": "重置代理日志记录级别时出错",
"xpack.fleet.agentLogs.resetLogLevel.successText": "将代理日志记录级别重置为策略",
"xpack.fleet.agentLogs.resetLogLevelLabelText": "重置为策略",
"xpack.fleet.agentLogs.searchPlaceholderText": "搜索日志……",
"xpack.fleet.agentLogs.selectLogLevel.errorTitleText": "更新代理日志记录级别时出错",
"xpack.fleet.agentLogs.selectLogLevel.successText": "已将代理日志记录级别更改为“{logLevel}”",
@ -27824,6 +27824,7 @@
"xpack.ml.deepLink.overview": "概览",
"xpack.ml.deepLink.resultExplorer": "结果浏览器",
"xpack.ml.deepLink.singleMetricViewer": "Single Metric Viewer",
"xpack.ml.deepLink.suppliedConfigurations": "提供的配置",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanDelete.job": "继续删除 {length, plural, other {# 个作业}}",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanDelete.model": "继续删除 {length, plural, other {# 个模型}}",
"xpack.ml.deleteSpaceAwareItemCheckModal.buttonTextCanUnTagConfirm": "从当前工作区中移除",
@ -32652,6 +32653,20 @@
"xpack.observabilityShared.bottomBarActions.unsavedChanges": "{unsavedChangesCount, plural, =0{0 个未保存更改} one {1 个未保存更改} other {# 个未保存更改}}",
"xpack.observabilityShared.breadcrumbs.observabilityLinkText": "Observability",
"xpack.observabilityShared.common.constants.grouping": "Observability",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemDescription": "有关所有 Elastic 功能的深入指南",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLabel": "浏览文档",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLinkARIALabel": "详细了解所有 Elastic 功能",
"xpack.observabilityShared.experimentalOnboardingFlow.browseDocumentationFlexItemLinkLabel": "了解详情",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemDescription": "浏览我们的实时演示环境",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemLabel": "演示环境",
"xpack.observabilityShared.experimentalOnboardingFlow.demoEnvironmentFlexItemLinkLabel": "浏览演示",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemDescription": "交流有关 Elastic 的看法",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLabel": "浏览论坛",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLinkARIALabel": "打开 Elastic 讨论论坛",
"xpack.observabilityShared.experimentalOnboardingFlow.exploreForumFlexItemLinkLabel": "讨论论坛",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemDescription": "通过创建案例获取帮助",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemLabel": "支持中心",
"xpack.observabilityShared.experimentalOnboardingFlow.supportHubFlexItemLinkLabel": "打开支持中心",
"xpack.observabilityShared.featureFeedbackButton.tellUsWhatYouThinkLink": "告诉我们您的看法!",
"xpack.observabilityShared.fieldValueSelection.apply": "应用",
"xpack.observabilityShared.fieldValueSelection.apply.label": "为 {label} 应用选定筛选",

View file

@ -6,7 +6,13 @@
*/
export type { Message, Conversation, KnowledgeBaseEntry, ConversationCreateRequest } from './types';
export { KnowledgeBaseEntryRole, MessageRole, ConversationAccess } from './types';
export {
KnowledgeBaseEntryRole,
MessageRole,
ConversationAccess,
KnowledgeBaseType,
KnowledgeBaseState,
} from './types';
export type { FunctionDefinition, CompatibleJSONSchema } from './functions/types';
export { FunctionVisibility } from './functions/function_visibility';
export {
@ -46,5 +52,3 @@ export {
export { concatenateChatCompletionChunks } from './utils/concatenate_chat_completion_chunks';
export { ShortIdTable } from './utils/short_id_table';
export { KnowledgeBaseType, KnowledgeBaseState } from './types';

View file

@ -105,8 +105,9 @@ export enum KnowledgeBaseType {
export enum KnowledgeBaseState {
NOT_INSTALLED = 'NOT_INSTALLED',
PENDING_MODEL_DEPLOYMENT = 'PENDING_MODEL_DEPLOYMENT',
MODEL_PENDING_DEPLOYMENT = 'MODEL_PENDING_DEPLOYMENT',
DEPLOYING_MODEL = 'DEPLOYING_MODEL',
MODEL_PENDING_ALLOCATION = 'MODEL_PENDING_ALLOCATION',
READY = 'READY',
ERROR = 'ERROR',
}
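As the hunk reads, `MODEL_PENDING_DEPLOYMENT` replaces `PENDING_MODEL_DEPLOYMENT` and `MODEL_PENDING_ALLOCATION` is new, so consumers gain an extra intermediate state to handle. A minimal, exhaustive mapping as a sketch; the labels are illustrative, not the actual i18n strings:

```
import { KnowledgeBaseState } from './types'; // sketch placed next to the enum; adjust the path as needed

// Illustrative status copy per state; the real UI uses i18n-translated labels.
export const KB_STATE_LABELS: Record<KnowledgeBaseState, string> = {
  [KnowledgeBaseState.NOT_INSTALLED]: 'Knowledge base is not installed',
  [KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT]: 'Waiting for the model to be deployed',
  [KnowledgeBaseState.DEPLOYING_MODEL]: 'Deploying the model',
  [KnowledgeBaseState.MODEL_PENDING_ALLOCATION]: 'Waiting for the model to be allocated to a node',
  [KnowledgeBaseState.READY]: 'Ready',
  [KnowledgeBaseState.ERROR]: 'Knowledge base setup failed',
};
```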

View file

@ -55,6 +55,9 @@ export function registerContextFunction({
};
if (!isKnowledgeBaseReady) {
resources.logger.warn(
'Knowledge base is not ready yet. Returning context function response without knowledge base entries.'
);
return { content };
}

View file

@ -13,7 +13,7 @@ import { MessageRole, ShortIdTable, type Message } from '../../../common';
import { concatenateChatCompletionChunks } from '../../../common/utils/concatenate_chat_completion_chunks';
import { FunctionCallChatFunction } from '../../service/types';
const SELECT_RELEVANT_FIELDS_NAME = 'select_relevant_fields';
export const SELECT_RELEVANT_FIELDS_NAME = 'select_relevant_fields';
export const GET_RELEVANT_FIELD_NAMES_SYSTEM_MESSAGE = `You are a helpful assistant for Elastic Observability.
Your task is to determine which fields are relevant to the conversation by selecting only the field IDs from the provided list.
The list in the user message consists of JSON objects that map a human-readable field "name" to its unique "id".

View file

@ -31,9 +31,7 @@ import { registerFunctions } from './functions';
import { recallRankingEvent } from './analytics/recall_ranking';
import { initLangtrace } from './service/client/instrumentation/init_langtrace';
import { aiAssistantCapabilities } from '../common/capabilities';
import { populateMissingSemanticTextFieldMigration } from './service/startup_migrations/populate_missing_semantic_text_field_migration';
import { updateExistingIndexAssets } from './service/startup_migrations/create_or_update_index_assets';
import { runStartupMigrations } from './service/startup_migrations/run_startup_migrations';
export class ObservabilityAIAssistantPlugin
implements
Plugin<
@ -130,19 +128,12 @@ export class ObservabilityAIAssistantPlugin
}));
// Update existing index assets (mappings, templates, etc). This will not create assets if they do not exist.
updateExistingIndexAssets({ logger: this.logger, core })
.then(() =>
populateMissingSemanticTextFieldMigration({
core,
logger: this.logger,
config: this.config,
})
)
.catch((e) =>
this.logger.error(
`Error during knowledge base migration in AI Assistant plugin startup: ${e.message}`
)
);
runStartupMigrations({
core,
logger: this.logger,
config: this.config,
}).catch((e) => this.logger.error(`Error while running startup migrations: ${e.message}`));
service.register(registerFunctions);
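`runStartupMigrations` stands in for the previous chain of `updateExistingIndexAssets(...)` followed by `populateMissingSemanticTextFieldMigration(...)`. Its contents are not shown in this hunk; a rough sketch of that composition, under the assumption that it simply sequences the two helpers visible elsewhere in this diff (import paths are guesses):

```
import type { CoreSetup, Logger } from '@kbn/core/server';
// Assumed paths, based on where the helpers appear elsewhere in this diff.
import { updateExistingIndexAssets } from '../index_assets/update_existing_index_assets';
import { populateMissingSemanticTextFieldWithLock } from './populate_missing_semantic_text_fields';
import type { ObservabilityAIAssistantConfig } from '../../config';
import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types';

export async function runStartupMigrations({
  core,
  logger,
  config,
}: {
  core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
  logger: Logger;
  config: ObservabilityAIAssistantConfig;
}): Promise<void> {
  // Bring mappings/templates of existing indices up to date (skipped when they do not exist yet)...
  await updateExistingIndexAssets({ logger, core });

  // ...then backfill missing semantic_text fields behind a distributed lock so that only one
  // Kibana node runs the migration at a time. The esClient shape is whatever the helper expects.
  const [coreStart] = await core.getStartServices();
  await populateMissingSemanticTextFieldWithLock({
    core,
    logger,
    config,
    esClient: coreStart.elasticsearch.client,
  });
}
```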

View file

@ -256,6 +256,8 @@ async function chatComplete(
},
} = params;
resources.logger.debug(`Initializing chat request with ${messages.length} messages`);
const { client, isCloudEnabled, signal, simulateFunctionCalling } = await initializeChatRequest(
resources
);

View file

@ -4,7 +4,7 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { notImplemented } from '@hapi/boom';
import * as t from 'io-ts';
import { Conversation, MessageRole } from '../../../common/types';
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
@ -40,10 +40,6 @@ const getConversationRoute = createObservabilityAIAssistantServerRoute({
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
const conversation = await client.get(params.path.conversationId);
// conversation without system messages
return getConversationWithoutSystemMessages(conversation);
@ -67,10 +63,6 @@ const findConversationsRoute = createObservabilityAIAssistantServerRoute({
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
const conversations = await client.find({ query: params?.body?.query });
return {
@ -96,11 +88,6 @@ const createConversationRoute = createObservabilityAIAssistantServerRoute({
const { service, request, params } = resources;
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
return client.create(params.body.conversation);
},
});
@ -121,11 +108,6 @@ const duplicateConversationRoute = createObservabilityAIAssistantServerRoute({
const { service, request, params } = resources;
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
return client.duplicateConversation(params.path.conversationId);
},
});
@ -149,11 +131,6 @@ const updateConversationRoute = createObservabilityAIAssistantServerRoute({
const { service, request, params } = resources;
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
return client.update(params.path.conversationId, params.body.conversation);
},
});
@ -174,11 +151,6 @@ const deleteConversationRoute = createObservabilityAIAssistantServerRoute({
const { service, request, params } = resources;
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
return client.delete(params.path.conversationId);
},
});
@ -204,10 +176,6 @@ const patchConversationRoute = createObservabilityAIAssistantServerRoute({
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
return client.updatePartial({
conversationId: params.path.conversationId,
updates: params.body,

View file

@ -4,7 +4,7 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { notImplemented } from '@hapi/boom';
import { nonEmptyStringRt, toBooleanRt } from '@kbn/io-ts-utils';
import { context as otelContext } from '@opentelemetry/api';
import * as t from 'io-ts';
@ -159,10 +159,6 @@ const functionRecallRoute = createObservabilityAIAssistantServerRoute({
body: { queries, categories },
} = resources.params;
if (!client) {
throw notImplemented();
}
const entries = await client.recall({ queries, categories });
return { entries };
},
@ -188,10 +184,6 @@ const functionSummariseRoute = createObservabilityAIAssistantServerRoute({
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
const {
title,
confidence,

View file

@ -6,14 +6,14 @@
*/
import pLimit from 'p-limit';
import { notImplemented } from '@hapi/boom';
import { nonEmptyStringRt, toBooleanRt } from '@kbn/io-ts-utils';
import * as t from 'io-ts';
import {
InferenceInferenceEndpointInfo,
MlTrainedModelStats,
} from '@elastic/elasticsearch/lib/api/types';
import moment from 'moment';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import pRetry from 'p-retry';
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
import {
Instruction,
@ -38,84 +38,47 @@ const getKnowledgeBaseStatus = createObservabilityAIAssistantServerRoute({
endpoint?: Partial<InferenceInferenceEndpointInfo>;
modelStats?: Partial<MlTrainedModelStats>;
kbState: KnowledgeBaseState;
currentInferenceId: string | undefined;
concreteWriteIndex: string | undefined;
isReIndexing: boolean;
}> => {
const client = await service.getClient({ request });
if (!client) {
throw notImplemented();
}
return client.getKnowledgeBaseStatus();
},
});
const setupKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
params: t.partial({
query: t.partial({
model_id: t.string,
params: t.type({
query: t.type({
inference_id: t.string,
}),
}),
options: {
timeout: {
idleSocket: moment.duration(20, 'minutes').asMilliseconds(),
},
},
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (resources): Promise<InferenceInferenceEndpointInfo> => {
handler: async (
resources
): Promise<{
reindex: boolean;
currentInferenceId: string | undefined;
nextInferenceId: string;
}> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
const { model_id: modelId } = resources.params?.query ?? {};
return await client.setupKnowledgeBase(modelId);
const { inference_id: inferenceId } = resources.params.query;
return client.setupKnowledgeBase(inferenceId);
},
});
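Setup now takes a required `inference_id` query parameter instead of the optional `model_id`. For illustration, the shape of a call (the app goes through the assistant's API client, which also sets the usual Kibana headers):

```
// Illustrative browser-side call. `.elser-2-elasticsearch` is the default endpoint id used
// elsewhere in this PR; a multilingual setup would pass the chosen E5 endpoint id instead.
const res = await fetch(
  '/internal/observability_ai_assistant/kb/setup?inference_id=.elser-2-elasticsearch',
  { method: 'POST', headers: { 'kbn-xsrf': 'true' } }
);
const { reindex, currentInferenceId, nextInferenceId } = await res.json();
```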
const resetKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/reset',
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (resources): Promise<{ result: string }> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
await client.resetKnowledgeBase();
return { result: 'success' };
},
});
const reIndexKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/reindex',
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (resources): Promise<{ result: boolean }> => {
const client = await resources.service.getClient({ request: resources.request });
const result = await client.reIndexKnowledgeBaseWithLock();
return { result };
},
});
const semanticTextMigrationKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint:
'POST /internal/observability_ai_assistant/kb/migrations/populate_missing_semantic_text_field',
const warmupModelKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/warmup_model',
params: t.type({
query: t.type({
inference_id: t.string,
}),
}),
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
@ -123,12 +86,61 @@ const semanticTextMigrationKnowledgeBase = createObservabilityAIAssistantServerR
},
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
const { inference_id: inferenceId } = resources.params.query;
return client.warmupKbModel(inferenceId);
},
});
if (!client) {
throw notImplemented();
}
const reIndexKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/reindex',
params: t.type({
query: t.type({
inference_id: t.string,
}),
}),
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (resources): Promise<{ result: boolean }> => {
const client = await resources.service.getClient({ request: resources.request });
const { inference_id: inferenceId } = resources.params.query;
const result = await client.reIndexKnowledgeBaseWithLock(inferenceId);
return { result };
},
});
return client.reIndexKnowledgeBaseAndPopulateSemanticTextField();
const startupMigrationsKnowledgeBase = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/kb/migrations/startup',
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
return client.runStartupMigrations();
},
});
const getKnowledgeBaseInferenceEndpoints = createObservabilityAIAssistantServerRoute({
endpoint: 'GET /internal/observability_ai_assistant/kb/inference_endpoints',
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (
resources
): Promise<{
endpoints: InferenceAPIConfigResponse[];
}> => {
const client = await resources.service.getClient({ request: resources.request });
return {
endpoints: await client.getInferenceEndpointsForEmbedding(),
};
},
});
@ -146,10 +158,6 @@ const getKnowledgeBaseUserInstructions = createObservabilityAIAssistantServerRou
}> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
return {
userInstructions: await client.getKnowledgeBaseUserInstructions(),
};
@ -173,10 +181,6 @@ const saveKnowledgeBaseUserInstruction = createObservabilityAIAssistantServerRou
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
const { id, text, public: isPublic } = resources.params.body;
return client.addUserInstruction({
entry: { id, text, public: isPublic },
@ -204,14 +208,9 @@ const getKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({
entries: KnowledgeBaseEntry[];
}> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
const { query, sortBy, sortDirection } = resources.params.query;
return await client.getKnowledgeBaseEntries({ query, sortBy, sortDirection });
return client.getKnowledgeBaseEntries({ query, sortBy, sortDirection });
},
});
@ -247,10 +246,6 @@ const saveKnowledgeBaseEntry = createObservabilityAIAssistantServerRoute({
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
const entry = resources.params.body;
return client.addKnowledgeBaseEntry({
entry: {
@ -279,11 +274,6 @@ const deleteKnowledgeBaseEntry = createObservabilityAIAssistantServerRoute({
},
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
return client.deleteKnowledgeBaseEntry(resources.params.path.entryId);
},
});
@ -303,10 +293,6 @@ const importKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({
handler: async (resources): Promise<void> => {
const client = await resources.service.getClient({ request: resources.request });
if (!client) {
throw notImplemented();
}
const { kbState } = await client.getKnowledgeBaseStatus();
if (kbState !== KnowledgeBaseState.READY) {
@ -314,31 +300,39 @@ const importKnowledgeBaseEntries = createObservabilityAIAssistantServerRoute({
}
const limiter = pLimit(5);
const promises = resources.params.body.entries.map(async (entry) => {
return limiter(async () => {
return client.addKnowledgeBaseEntry({
entry: {
confidence: 'high',
is_correction: false,
public: true,
labels: {},
role: KnowledgeBaseEntryRole.UserEntry,
...entry,
return pRetry(
() => {
return client.addKnowledgeBaseEntry({
entry: {
confidence: 'high',
is_correction: false,
public: true,
labels: {},
role: KnowledgeBaseEntryRole.UserEntry,
...entry,
},
});
},
});
{ retries: 10 }
);
});
});
await Promise.all(promises);
resources.logger.info(
`Imported ${resources.params.body.entries.length} knowledge base entries`
);
},
});
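Bulk import now wraps each save in a retry on top of the existing concurrency limit. The combined pattern in isolation, as a generic sketch rather than the route code:

```
import pLimit from 'p-limit';
import pRetry from 'p-retry';

// At most 5 saves in flight at once, each retried up to 10 times on failure.
async function importAll<T>(entries: T[], save: (entry: T) => Promise<void>): Promise<void> {
  const limiter = pLimit(5);
  await Promise.all(
    entries.map((entry) => limiter(() => pRetry(() => save(entry), { retries: 10 })))
  );
}
```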
export const knowledgeBaseRoutes = {
...reIndexKnowledgeBase,
...semanticTextMigrationKnowledgeBase,
...startupMigrationsKnowledgeBase,
...setupKnowledgeBase,
...resetKnowledgeBase,
...reIndexKnowledgeBase,
...getKnowledgeBaseStatus,
...getKnowledgeBaseEntries,
...saveKnowledgeBaseUserInstruction,
@ -346,4 +340,6 @@ export const knowledgeBaseRoutes = {
...getKnowledgeBaseUserInstructions,
...saveKnowledgeBaseEntry,
...deleteKnowledgeBaseEntry,
...getKnowledgeBaseInferenceEndpoints,
...warmupModelKnowledgeBase,
};

View file

@ -5,21 +5,36 @@
* 2.0.
*/
import { createOrUpdateIndexAssets } from '../../service/startup_migrations/create_or_update_index_assets';
import * as t from 'io-ts';
import { createObservabilityAIAssistantServerRoute } from '../create_observability_ai_assistant_server_route';
import { createOrUpdateConversationIndexAssets } from '../../service/index_assets/create_or_update_conversation_index_assets';
import { createOrUpdateKnowledgeBaseIndexAssets } from '../../service/index_assets/create_or_update_knowledge_base_index_assets';
const createOrUpdateIndexAssetsRoute = createObservabilityAIAssistantServerRoute({
endpoint: 'POST /internal/observability_ai_assistant/index_assets',
params: t.type({
query: t.type({
inference_id: t.string,
}),
}),
security: {
authz: {
requiredPrivileges: ['ai_assistant'],
},
},
handler: async (resources): Promise<void> => {
return createOrUpdateIndexAssets({
const { inference_id: inferenceId } = resources.params.query;
await createOrUpdateConversationIndexAssets({
logger: resources.logger,
core: resources.plugins.core.setup,
});
return createOrUpdateKnowledgeBaseIndexAssets({
logger: resources.logger,
core: resources.plugins.core.setup,
inferenceId,
});
},
});

View file

@ -32,6 +32,7 @@ import type { AssistantScope } from '@kbn/ai-assistant-common';
import type { InferenceClient } from '@kbn/inference-plugin/server';
import { ChatCompleteResponse, FunctionCallingMode, ToolChoiceType } from '@kbn/inference-common';
import { LockAcquisitionError } from '@kbn/lock-manager';
import { resourceNames } from '..';
import {
ChatCompletionChunkEvent,
@ -67,12 +68,15 @@ import { continueConversation } from './operators/continue_conversation';
import { convertInferenceEventsToStreamingEvents } from './operators/convert_inference_events_to_streaming_events';
import { extractMessages } from './operators/extract_messages';
import { getGeneratedTitle } from './operators/get_generated_title';
import { populateMissingSemanticTextFieldMigration } from '../startup_migrations/populate_missing_semantic_text_field_migration';
import { runStartupMigrations } from '../startup_migrations/run_startup_migrations';
import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { ObservabilityAIAssistantConfig } from '../../config';
import { getElserModelId } from '../knowledge_base_service/get_elser_model_id';
import { apmInstrumentation } from './operators/apm_instrumentation';
import { waitForKbModel, warmupModel } from '../inference_endpoint';
import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base';
import { populateMissingSemanticTextFieldWithLock } from '../startup_migrations/populate_missing_semantic_text_fields';
import { createOrUpdateKnowledgeBaseIndexAssets } from '../index_assets/create_or_update_knowledge_base_index_assets';
import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index';
const MAX_FUNCTION_CALLS = 8;
@ -103,7 +107,7 @@ export class ObservabilityAIAssistantClient {
conversationId: string
): Promise<SearchHit<Conversation> | undefined> => {
const response = await this.dependencies.esClient.asInternalUser.search<Conversation>({
index: resourceNames.aliases.conversations,
index: resourceNames.writeIndexAlias.conversations,
query: {
bool: {
filter: [
@ -529,7 +533,7 @@ export class ObservabilityAIAssistantClient {
find = async (options?: { query?: string }): Promise<Conversation[]> => {
const response = await this.dependencies.esClient.asInternalUser.search<Conversation>({
index: resourceNames.aliases.conversations,
index: resourceNames.writeIndexAlias.conversations,
allow_no_indices: true,
query: {
bool: {
@ -594,7 +598,7 @@ export class ObservabilityAIAssistantClient {
);
await this.dependencies.esClient.asInternalUser.index({
index: resourceNames.aliases.conversations,
index: resourceNames.writeIndexAlias.conversations,
document: createdConversation,
refresh: true,
});
@ -662,53 +666,111 @@ export class ObservabilityAIAssistantClient {
);
};
getKnowledgeBaseStatus = () => {
return this.dependencies.knowledgeBaseService.getStatus();
getInferenceEndpointsForEmbedding = () => {
return this.dependencies.knowledgeBaseService.getInferenceEndpointsForEmbedding();
};
setupKnowledgeBase = async (modelId: string | undefined) => {
const { esClient, core, logger, knowledgeBaseService } = this.dependencies;
getKnowledgeBaseStatus = () => {
return this.dependencies.knowledgeBaseService.getModelStatus();
};
if (!modelId) {
modelId = await getElserModelId({ core, logger });
}
setupKnowledgeBase = async (
nextInferenceId: string
): Promise<{
reindex: boolean;
currentInferenceId: string | undefined;
nextInferenceId: string;
}> => {
const { esClient, core, logger } = this.dependencies;
// setup the knowledge base
const res = await knowledgeBaseService.setup(esClient, modelId);
logger.debug(`Setting up knowledge base with inference_id: ${nextInferenceId}`);
populateMissingSemanticTextFieldMigration({
core,
logger,
config: this.dependencies.config,
}).catch((e) => {
this.dependencies.logger.error(
`Failed to populate missing semantic text fields: ${e.message}`
const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => {
logger.debug(
`Current KB write index does not have an inference_id. This is to be expected for indices created before 8.16`
);
return undefined;
});
return res;
if (currentInferenceId === nextInferenceId) {
logger.debug('Inference ID is unchanged. No need to re-index knowledge base.');
warmupModel({ esClient, logger, inferenceId: nextInferenceId }).catch(() => {});
return { reindex: false, currentInferenceId, nextInferenceId };
}
await createOrUpdateKnowledgeBaseIndexAssets({
core: this.dependencies.core,
logger: this.dependencies.logger,
inferenceId: nextInferenceId,
});
waitForKbModel({
core: this.dependencies.core,
esClient,
logger,
config: this.dependencies.config,
inferenceId: nextInferenceId,
})
.then(async () => {
logger.info(
`Inference ID has changed from "${currentInferenceId}" to "${nextInferenceId}". Re-indexing knowledge base.`
);
await reIndexKnowledgeBaseWithLock({
core,
logger,
esClient,
inferenceId: nextInferenceId,
});
await populateMissingSemanticTextFieldWithLock({
core,
logger,
config: this.dependencies.config,
esClient: this.dependencies.esClient,
});
})
.catch((e) => {
const isLockAcquisitionError = e instanceof LockAcquisitionError;
if (isLockAcquisitionError) {
logger.info(e.message);
} else {
logger.error(
`Failed to setup knowledge base with inference_id: ${nextInferenceId}. Error: ${e.message}`
);
logger.debug(e);
}
});
return { reindex: true, currentInferenceId, nextInferenceId };
};
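A caller can use the returned flags to decide whether to poll for re-index progress. A minimal sketch, assuming a `client` obtained via `service.getClient({ request })`:

```
import type { ObservabilityAIAssistantClient } from './client'; // illustrative import path

declare const client: ObservabilityAIAssistantClient;

const { reindex, currentInferenceId, nextInferenceId } = await client.setupKnowledgeBase(
  '.multilingual-e5-small-elasticsearch' // example endpoint id; whichever the user picked during onboarding
);

if (reindex) {
  // The write index was backed by `currentInferenceId` (or none). Existing entries are
  // re-indexed against `nextInferenceId` in the background once the model is allocated.
} else {
  // Same inference id as before: nothing to re-index, the model is only warmed up.
}
```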
resetKnowledgeBase = () => {
const { esClient } = this.dependencies;
return this.dependencies.knowledgeBaseService.reset(esClient);
warmupKbModel = (inferenceId: string) => {
return waitForKbModel({
core: this.dependencies.core,
esClient: this.dependencies.esClient,
logger: this.dependencies.logger,
config: this.dependencies.config,
inferenceId,
});
};
reIndexKnowledgeBaseWithLock = () => {
reIndexKnowledgeBaseWithLock = (inferenceId: string) => {
return reIndexKnowledgeBaseWithLock({
core: this.dependencies.core,
esClient: this.dependencies.esClient,
logger: this.dependencies.logger,
inferenceId,
});
};
reIndexKnowledgeBaseAndPopulateSemanticTextField = () => {
return populateMissingSemanticTextFieldMigration({
runStartupMigrations = () => {
return runStartupMigrations({
core: this.dependencies.core,
logger: this.dependencies.logger,
config: this.dependencies.config,
});
};
addUserInstruction = async ({
entry,
}: {

View file

@ -17,9 +17,9 @@ import { ObservabilityAIAssistantClient } from './client';
import { KnowledgeBaseService } from './knowledge_base_service';
import type { RegistrationCallback, RespondFunctionResources } from './types';
import { ObservabilityAIAssistantConfig } from '../config';
import { createOrUpdateIndexAssets } from './startup_migrations/create_or_update_index_assets';
import { createOrUpdateConversationIndexAssets } from './index_assets/create_or_update_conversation_index_assets';
function getResourceName(resource: string) {
export function getResourceName(resource: string) {
return `.kibana-observability-ai-assistant-${resource}`;
}
@ -28,7 +28,7 @@ export const resourceNames = {
conversations: getResourceName('component-template-conversations'),
kb: getResourceName('component-template-kb'),
},
aliases: {
writeIndexAlias: {
conversations: getResourceName('conversations'),
kb: getResourceName('kb'),
},
@ -40,15 +40,15 @@ export const resourceNames = {
conversations: getResourceName('index-template-conversations'),
kb: getResourceName('index-template-kb'),
},
concreteIndexName: {
concreteWriteIndexName: {
conversations: getResourceName('conversations-000001'),
kb: getResourceName('kb-000001'),
},
};
const createIndexAssetsOnce = once(
const createConversationIndexAssetsOnce = once(
(logger: Logger, core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>) =>
pRetry(() => createOrUpdateIndexAssets({ logger, core }))
pRetry(() => createOrUpdateConversationIndexAssets({ logger, core }))
);
export class ObservabilityAIAssistantService {
@ -86,7 +86,7 @@ export class ObservabilityAIAssistantService {
const [[coreStart, plugins]] = await Promise.all([
this.core.getStartServices(),
createIndexAssetsOnce(this.logger, this.core),
createConversationIndexAssetsOnce(this.logger, this.core),
]);
// user will not be found when executed from system connector context

View file

@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-plugin/server';
import type { CoreSetup, Logger } from '@kbn/core/server';
import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { conversationComponentTemplate } from './templates/conversation_component_template';
import { resourceNames } from '..';
export async function createOrUpdateConversationIndexAssets({
logger,
core,
}: {
logger: Logger;
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
}) {
try {
logger.debug('Setting up index assets');
const [coreStart] = await core.getStartServices();
const { asInternalUser } = coreStart.elasticsearch.client;
// Conversations: component template
await asInternalUser.cluster.putComponentTemplate({
create: false,
name: resourceNames.componentTemplate.conversations,
template: conversationComponentTemplate,
});
// Conversations: index template
await asInternalUser.indices.putIndexTemplate({
name: resourceNames.indexTemplate.conversations,
composed_of: [resourceNames.componentTemplate.conversations],
create: false,
index_patterns: [resourceNames.indexPatterns.conversations],
template: {
settings: {
number_of_shards: 1,
auto_expand_replicas: '0-1',
hidden: true,
},
},
});
// Conversations: write index
const conversationAliasName = resourceNames.writeIndexAlias.conversations;
await createConcreteWriteIndex({
esClient: asInternalUser,
logger,
totalFieldsLimit: 10000,
indexPatterns: {
alias: conversationAliasName,
pattern: `${conversationAliasName}*`,
basePattern: `${conversationAliasName}*`,
name: resourceNames.concreteWriteIndexName.conversations,
template: resourceNames.indexTemplate.conversations,
},
dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }),
});
logger.info('Successfully set up conversation index assets');
} catch (error) {
logger.error(`Failed setting up conversation index assets: ${error.message}`);
logger.debug(error);
}
}

View file

@ -0,0 +1,71 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { CoreSetup, Logger } from '@kbn/core/server';
import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-plugin/server';
import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { getComponentTemplate } from './templates/kb_component_template';
import { resourceNames } from '..';
export async function createOrUpdateKnowledgeBaseIndexAssets({
logger,
core,
inferenceId,
}: {
logger: Logger;
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
inferenceId: string;
}) {
try {
logger.debug('Setting up knowledge base index assets');
const [coreStart] = await core.getStartServices();
const { asInternalUser } = coreStart.elasticsearch.client;
// Knowledge base: component template
await asInternalUser.cluster.putComponentTemplate({
create: false,
name: resourceNames.componentTemplate.kb,
template: getComponentTemplate(inferenceId),
});
// Knowledge base: index template
await asInternalUser.indices.putIndexTemplate({
name: resourceNames.indexTemplate.kb,
composed_of: [resourceNames.componentTemplate.kb],
create: false,
index_patterns: [resourceNames.indexPatterns.kb],
template: {
settings: {
number_of_shards: 1,
auto_expand_replicas: '0-1',
hidden: true,
},
},
});
// Knowledge base: write index
const kbAliasName = resourceNames.writeIndexAlias.kb;
await createConcreteWriteIndex({
esClient: asInternalUser,
logger,
totalFieldsLimit: 10000,
indexPatterns: {
alias: kbAliasName,
pattern: `${kbAliasName}*`,
basePattern: `${kbAliasName}*`,
name: resourceNames.concreteWriteIndexName.kb,
template: resourceNames.indexTemplate.kb,
},
dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }),
});
logger.info('Successfully set up knowledge base index assets');
} catch (error) {
logger.error(`Failed setting up knowledge base index assets: ${error.message}`);
logger.debug(error);
}
}
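Usage mirrors the conversation variant, with the chosen inference id threaded into the component template. A sketch, assuming `core` and `logger` from plugin setup:

```
import { createOrUpdateKnowledgeBaseIndexAssets } from './create_or_update_knowledge_base_index_assets';

// Point the knowledge base mappings at a specific inference endpoint.
await createOrUpdateKnowledgeBaseIndexAssets({
  core,   // CoreSetup<ObservabilityAIAssistantPluginStartDependencies>
  logger, // Logger
  inferenceId: '.elser-2-elasticsearch', // or the E5 endpoint id when multilingual support is wanted
});
```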

View file

@ -0,0 +1,81 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ClusterComponentTemplate } from '@elastic/elasticsearch/lib/api/types';
const keyword = {
type: 'keyword' as const,
ignore_above: 1024,
};
const text = {
type: 'text' as const,
};
const date = {
type: 'date' as const,
};
const dynamic = {
type: 'object' as const,
dynamic: true,
};
export function getComponentTemplate(inferenceId: string) {
const kbComponentTemplate: ClusterComponentTemplate['component_template']['template'] = {
mappings: {
dynamic: false,
properties: {
'@timestamp': date,
id: keyword,
doc_id: { type: 'text', fielddata: true }, // deprecated but kept for backwards compatibility
title: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
ignore_above: 256,
},
},
},
user: {
properties: {
id: keyword,
name: keyword,
},
},
type: keyword,
labels: dynamic,
conversation: {
properties: {
id: keyword,
title: text,
last_updated: date,
},
},
namespace: keyword,
text,
semantic_text: {
type: 'semantic_text',
inference_id: inferenceId,
},
'ml.tokens': {
type: 'rank_features',
},
confidence: keyword,
is_correction: {
type: 'boolean',
},
public: {
type: 'boolean',
},
},
},
};
return kbComponentTemplate;
}
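The template is parameterized only by the inference id, so switching models amounts to regenerating the component template (plus the re-index handled by the client). For example:

```
import { getComponentTemplate } from './templates/kb_component_template'; // path as referenced in this diff

const template = getComponentTemplate('.elser-2-elasticsearch');
// template.mappings?.properties?.semantic_text is now:
//   { type: 'semantic_text', inference_id: '.elser-2-elasticsearch' }
```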

View file

@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import type { CoreSetup, Logger } from '@kbn/core/server';
import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { createOrUpdateConversationIndexAssets } from './create_or_update_conversation_index_assets';
import { createOrUpdateKnowledgeBaseIndexAssets } from './create_or_update_knowledge_base_index_assets';
import { hasKbWriteIndex } from '../knowledge_base_service/has_kb_index';
import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index';
import { resourceNames } from '..';
export const DEFAULT_INFERENCE_ENDPOINT = '.elser-2-elasticsearch';
export async function updateExistingIndexAssets({
logger,
core,
}: {
logger: Logger;
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
}) {
const [coreStart] = await core.getStartServices();
const esClient = coreStart.elasticsearch.client;
const doesKbIndexExist = await hasKbWriteIndex({ esClient });
const doesConversationIndexExist = await esClient.asInternalUser.indices.exists({
index: resourceNames.writeIndexAlias.conversations,
});
if (!doesKbIndexExist && !doesConversationIndexExist) {
logger.warn('Index assets do not exist. Aborting updating index assets');
return;
}
if (doesConversationIndexExist) {
logger.debug('Found index for conversations. Updating index assets.');
await createOrUpdateConversationIndexAssets({ logger, core });
}
if (doesKbIndexExist) {
logger.debug('Found index for knowledge base. Updating index assets.');
const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => {
logger.debug(
`Current KB write index does not have an inference_id. This is to be expected for indices created before 8.16`
);
return DEFAULT_INFERENCE_ENDPOINT;
});
await createOrUpdateKnowledgeBaseIndexAssets({ logger, core, inferenceId: currentInferenceId });
}
}

View file

@ -8,86 +8,71 @@
import { errors } from '@elastic/elasticsearch';
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Logger } from '@kbn/logging';
import moment from 'moment';
import pRetry from 'p-retry';
import {
InferenceInferenceEndpointInfo,
MlGetTrainedModelsStatsResponse,
MlTrainedModelStats,
} from '@elastic/elasticsearch/lib/api/types';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import pRetry from 'p-retry';
import { CoreSetup } from '@kbn/core/server';
import { KnowledgeBaseState } from '../../common';
import { ObservabilityAIAssistantConfig } from '../config';
import {
getConcreteWriteIndex,
getInferenceIdFromWriteIndex,
} from './knowledge_base_service/get_inference_id_from_write_index';
import { isReIndexInProgress } from './knowledge_base_service/reindex_knowledge_base';
import { ObservabilityAIAssistantPluginStartDependencies } from '../types';
export const AI_ASSISTANT_KB_INFERENCE_ID = 'obs_ai_assistant_kb_inference';
const SUPPORTED_TASK_TYPES = ['sparse_embedding', 'text_embedding'];
export async function createInferenceEndpoint({
export const getInferenceEndpointsForEmbedding = async ({
esClient,
logger,
modelId,
}: {
esClient: {
asCurrentUser: ElasticsearchClient;
};
logger: Logger;
modelId: string;
}) {
try {
logger.debug(`Creating inference endpoint "${AI_ASSISTANT_KB_INFERENCE_ID}"`);
return await esClient.asCurrentUser.inference.put(
{
inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
task_type: 'sparse_embedding',
inference_config: {
service: 'elasticsearch',
service_settings: {
model_id: modelId,
adaptive_allocations: { enabled: true, min_number_of_allocations: 1 },
num_threads: 1,
},
task_settings: {},
},
},
{
requestTimeout: moment.duration(2, 'minutes').asMilliseconds(),
}
);
} catch (e) {
logger.error(
`Failed to create inference endpoint "${AI_ASSISTANT_KB_INFERENCE_ID}": ${e.message}`
);
throw e;
}
}
export async function deleteInferenceEndpoint({
esClient,
}: {
esClient: {
asCurrentUser: ElasticsearchClient;
};
}) {
const response = await esClient.asCurrentUser.inference.delete({
inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
force: true,
});
return response;
}
export async function getInferenceEndpoint({
esClient,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
}): Promise<{
inferenceEndpoints: InferenceAPIConfigResponse[];
}> => {
const { endpoints } = await esClient.asInternalUser.inference.get({
inference_id: '_all',
});
if (!endpoints.length) {
throw new Error('Did not find any inference endpoints');
}
const embeddingEndpoints = endpoints.filter((endpoint) =>
SUPPORTED_TASK_TYPES.includes(endpoint.task_type)
);
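// For example, the preconfigured `.elser-2-elasticsearch` endpoint (sparse_embedding) and an
// E5-based text_embedding endpoint both pass this filter, while endpoints for other task types
// (e.g. completion or rerank) are excluded.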
if (!embeddingEndpoints.length) {
throw new Error('Did not find any inference endpoints for embedding');
}
logger.debug(`Found ${embeddingEndpoints.length} inference endpoints for supported task types`);
return {
inferenceEndpoints: embeddingEndpoints as InferenceAPIConfigResponse[],
};
};
async function getInferenceEndpoint({
esClient,
inferenceId,
}: {
esClient: { asInternalUser: ElasticsearchClient };
inferenceId: string;
}) {
const response = await esClient.asInternalUser.inference.get({
inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
inference_id: inferenceId,
});
if (response.endpoints.length === 0) {
throw new Error('Inference endpoint not found');
}
return response.endpoints[0];
}
@ -100,51 +85,104 @@ export function isInferenceEndpointMissingOrUnavailable(error: Error) {
}
export async function getKbModelStatus({
core,
esClient,
logger,
config,
inferenceId,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
config: ObservabilityAIAssistantConfig;
inferenceId?: string;
}): Promise<{
enabled: boolean;
endpoint?: InferenceInferenceEndpointInfo;
modelStats?: MlTrainedModelStats;
errorMessage?: string;
kbState: KnowledgeBaseState;
currentInferenceId: string | undefined;
concreteWriteIndex: string | undefined;
isReIndexing: boolean;
}> {
const enabled = config.enableKnowledgeBase;
const concreteWriteIndex = await getConcreteWriteIndex(esClient);
const isReIndexing = await isReIndexInProgress({ esClient, logger, core });
const currentInferenceId = await getInferenceIdFromWriteIndex(esClient).catch(() => undefined);
if (!inferenceId) {
if (!currentInferenceId) {
logger.error('Inference id not provided and not found in write index');
return {
enabled,
errorMessage: 'Inference id not found',
kbState: KnowledgeBaseState.NOT_INSTALLED,
currentInferenceId,
concreteWriteIndex,
isReIndexing,
};
}
logger.debug(`Using current inference id "${currentInferenceId}" from write index`);
inferenceId = currentInferenceId;
}
let endpoint: InferenceInferenceEndpointInfo;
try {
endpoint = await getInferenceEndpoint({ esClient });
endpoint = await getInferenceEndpoint({ esClient, inferenceId });
logger.debug(
`Inference endpoint "${inferenceId}" found with model id "${endpoint?.service_settings?.model_id}"`
);
} catch (error) {
if (!isInferenceEndpointMissingOrUnavailable(error)) {
throw error;
}
return { enabled, errorMessage: error.message, kbState: KnowledgeBaseState.NOT_INSTALLED };
logger.error(`Inference endpoint "${inferenceId}" not found or unavailable: ${error.message}`);
return {
enabled,
errorMessage: error.message,
kbState: KnowledgeBaseState.NOT_INSTALLED,
currentInferenceId,
concreteWriteIndex,
isReIndexing,
};
}
const modelId = endpoint?.service_settings?.model_id;
let trainedModelStatsResponse: MlGetTrainedModelsStatsResponse;
try {
trainedModelStatsResponse = await esClient.asInternalUser.ml.getTrainedModelsStats({
model_id: endpoint.service_settings?.model_id,
model_id: modelId,
});
} catch (error) {
logger.error(`Failed to get model stats: ${error.message}`);
return { enabled, errorMessage: error.message, kbState: KnowledgeBaseState.ERROR };
logger.debug(
`Failed to get model stats for model "${modelId}" and inference id "${inferenceId}": ${error.message}`
);
return {
enabled,
endpoint,
errorMessage: error.message,
kbState: KnowledgeBaseState.NOT_INSTALLED,
currentInferenceId,
concreteWriteIndex,
isReIndexing,
};
}
const modelStats = trainedModelStatsResponse.trained_model_stats.find(
(stats) => stats.deployment_stats?.deployment_id === AI_ASSISTANT_KB_INFERENCE_ID
(stats) => stats.deployment_stats?.deployment_id === inferenceId
);
let kbState: KnowledgeBaseState;
if (!modelStats) {
kbState = KnowledgeBaseState.PENDING_MODEL_DEPLOYMENT;
} else if (modelStats.deployment_stats?.state === 'failed') {
if (trainedModelStatsResponse.trained_model_stats?.length && !modelStats) {
// model has been deployed at least once, but stopped later
kbState = KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT;
} else if (modelStats?.deployment_stats?.state === 'failed') {
kbState = KnowledgeBaseState.ERROR;
} else if (
modelStats?.deployment_stats?.state === 'starting' &&
@ -157,6 +195,13 @@ export async function getKbModelStatus({
modelStats?.deployment_stats?.allocation_status?.allocation_count > 0
) {
kbState = KnowledgeBaseState.READY;
} else if (
modelStats?.deployment_stats?.state === 'started' &&
modelStats?.deployment_stats?.allocation_status?.state === 'fully_allocated' &&
modelStats?.deployment_stats?.allocation_status?.allocation_count === 0
) {
// model has been scaled down due to inactivity
kbState = KnowledgeBaseState.MODEL_PENDING_ALLOCATION;
} else {
kbState = KnowledgeBaseState.ERROR;
}
@ -166,21 +211,32 @@ export async function getKbModelStatus({
enabled,
modelStats,
kbState,
currentInferenceId,
concreteWriteIndex,
isReIndexing,
};
}
export async function waitForKbModel({
core,
esClient,
logger,
config,
inferenceId,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
config: ObservabilityAIAssistantConfig;
inferenceId: string;
}) {
// Run a dummy inference request to trigger model deployment.
// This works around the model possibly not being deployed (or allocated) yet when polling starts.
await warmupModel({ esClient, logger, inferenceId }).catch(() => {});
return pRetry(
async () => {
const { kbState } = await getKbModelStatus({ esClient, logger, config });
const { kbState } = await getKbModelStatus({ core, esClient, logger, config, inferenceId });
if (kbState !== KnowledgeBaseState.READY) {
logger.debug('Knowledge base model is not yet ready. Retrying...');
@ -190,3 +246,25 @@ export async function waitForKbModel({
{ retries: 30, factor: 2, maxTimeout: 30_000 }
);
}
export async function warmupModel({
esClient,
logger,
inferenceId,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
inferenceId: string;
}) {
logger.debug(`Running inference to trigger model deployment for "${inferenceId}"`);
await pRetry(
() =>
esClient.asInternalUser.inference.inference({
inference_id: inferenceId,
input: 'hello world',
}),
{ retries: 10 }
).catch((error) => {
logger.error(`Unable to run inference on endpoint "${inferenceId}": ${error.message}`);
});
}

View file

@ -1,78 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ClusterComponentTemplate } from '@elastic/elasticsearch/lib/api/types';
import { AI_ASSISTANT_KB_INFERENCE_ID } from './inference_endpoint';
const keyword = {
type: 'keyword' as const,
ignore_above: 1024,
};
const text = {
type: 'text' as const,
};
const date = {
type: 'date' as const,
};
const dynamic = {
type: 'object' as const,
dynamic: true,
};
export const kbComponentTemplate: ClusterComponentTemplate['component_template']['template'] = {
mappings: {
dynamic: false,
properties: {
'@timestamp': date,
id: keyword,
doc_id: { type: 'text', fielddata: true }, // deprecated but kept for backwards compatibility
title: {
type: 'text',
fields: {
keyword: {
type: 'keyword',
ignore_above: 256,
},
},
},
user: {
properties: {
id: keyword,
name: keyword,
},
},
type: keyword,
labels: dynamic,
conversation: {
properties: {
id: keyword,
title: text,
last_updated: date,
},
},
namespace: keyword,
text,
semantic_text: {
type: 'semantic_text',
inference_id: AI_ASSISTANT_KB_INFERENCE_ID,
},
'ml.tokens': {
type: 'rank_features',
},
confidence: keyword,
is_correction: {
type: 'boolean',
},
public: {
type: 'boolean',
},
},
},
};

View file

@ -0,0 +1,48 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { errors } from '@elastic/elasticsearch';
import { Logger } from '@kbn/logging';
export async function createKnowledgeBaseIndex({
esClient,
logger,
inferenceId,
indexName,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
inferenceId: string;
indexName: string;
}) {
logger.debug(`Creating knowledge base write index "${indexName}"`);
try {
await esClient.asInternalUser.indices.create({
index: indexName,
mappings: {
properties: {
semantic_text: {
type: 'semantic_text',
inference_id: inferenceId,
},
},
},
});
} catch (error) {
if (
error instanceof errors.ResponseError &&
error?.body?.error?.type === 'resource_already_exists_exception'
) {
throw new Error(
`Write index "${indexName}" already exists. Please delete it before creating a new index.`
);
}
throw error;
}
}

View file

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { MappingSemanticTextProperty } from '@elastic/elasticsearch/lib/api/types';
import { first } from 'lodash';
import { resourceNames } from '..';
export async function getConcreteWriteIndex(esClient: { asInternalUser: ElasticsearchClient }) {
try {
const res = await esClient.asInternalUser.indices.getAlias({
name: resourceNames.writeIndexAlias.kb,
});
return first(Object.keys(res));
} catch (error) {
return;
}
}
export async function getInferenceIdFromWriteIndex(esClient: {
asInternalUser: ElasticsearchClient;
}): Promise<string> {
const response = await esClient.asInternalUser.indices.getMapping({
index: resourceNames.writeIndexAlias.kb,
});
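// For reference, the getMapping response is keyed by the concrete index name; a minimal sketch of
// the shape navigated below (index name and inference id are illustrative):
// {
//   ".kibana-observability-ai-assistant-kb-000001": {
//     "mappings": {
//       "properties": {
//         "semantic_text": { "type": "semantic_text", "inference_id": ".elser-2-elasticsearch" }
//       }
//     }
//   }
// }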
const [indexName, indexMappings] = Object.entries(response)[0];
const inferenceId = (
indexMappings.mappings?.properties?.semantic_text as MappingSemanticTextProperty
)?.inference_id;
if (!inferenceId) {
throw new Error(`inference_id not found in field mappings for index ${indexName}`);
}
return inferenceId;
}

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { resourceNames } from '..';
export async function hasKbWriteIndex({
esClient,
}: {
esClient: { asInternalUser: ElasticsearchClient };
}) {
return esClient.asInternalUser.indices.exists({
index: resourceNames.writeIndexAlias.kb,
});
}

View file

@ -22,19 +22,18 @@ import { getAccessQuery, getUserAccessFilters } from '../util/get_access_query';
import { getCategoryQuery } from '../util/get_category_query';
import { getSpaceQuery } from '../util/get_space_query';
import {
createInferenceEndpoint,
deleteInferenceEndpoint,
getInferenceEndpointsForEmbedding,
getKbModelStatus,
isInferenceEndpointMissingOrUnavailable,
} from '../inference_endpoint';
import { recallFromSearchConnectors } from './recall_from_search_connectors';
import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { ObservabilityAIAssistantConfig } from '../../config';
import {
isKnowledgeBaseIndexWriteBlocked,
isSemanticTextUnsupportedError,
reIndexKnowledgeBaseWithLock,
} from './reindex_knowledge_base';
import { hasKbWriteIndex } from './has_kb_index';
import { getInferenceIdFromWriteIndex } from './get_inference_id_from_write_index';
import { reIndexKnowledgeBaseWithLock } from './reindex_knowledge_base';
import { isSemanticTextUnsupportedError } from '../startup_migrations/run_startup_migrations';
import { isKnowledgeBaseIndexWriteBlocked } from './index_write_block_utils';
interface Dependencies {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
@ -54,35 +53,13 @@ export interface RecalledEntry {
labels?: Record<string, string>;
}
function throwKnowledgeBaseNotReady(body: any) {
throw serverUnavailable(`Knowledge base is not ready yet`, body);
function throwKnowledgeBaseNotReady(error: Error) {
throw serverUnavailable(`Knowledge base is not ready yet: ${error.message}`);
}
export class KnowledgeBaseService {
constructor(private readonly dependencies: Dependencies) {}
async setup(
esClient: {
asCurrentUser: ElasticsearchClient;
asInternalUser: ElasticsearchClient;
},
modelId: string
) {
await deleteInferenceEndpoint({ esClient }).catch((e) => {}); // ensure existing inference endpoint is deleted
return createInferenceEndpoint({ esClient, logger: this.dependencies.logger, modelId });
}
async reset(esClient: { asCurrentUser: ElasticsearchClient }) {
try {
await deleteInferenceEndpoint({ esClient });
} catch (error) {
if (isInferenceEndpointMissingOrUnavailable(error)) {
return;
}
throw error;
}
}
private async recallFromKnowledgeBase({
queries,
categories,
@ -97,7 +74,7 @@ export class KnowledgeBaseService {
const response = await this.dependencies.esClient.asInternalUser.search<
Pick<KnowledgeBaseEntry, 'text' | 'is_correction' | 'labels' | 'title'> & { doc_id?: string }
>({
index: [resourceNames.aliases.kb],
index: [resourceNames.writeIndexAlias.kb],
query: {
bool: {
should: queries.map(({ text, boost = 1 }) => ({
@ -168,7 +145,7 @@ export class KnowledgeBaseService {
namespace,
}).catch((error) => {
if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
throwKnowledgeBaseNotReady(error);
}
throw error;
}),
@ -227,9 +204,16 @@ export class KnowledgeBaseService {
if (!this.dependencies.config.enableKnowledgeBase) {
return [];
}
const doesKbIndexExist = await hasKbWriteIndex({ esClient: this.dependencies.esClient });
if (!doesKbIndexExist) {
return [];
}
try {
const response = await this.dependencies.esClient.asInternalUser.search<KnowledgeBaseEntry>({
index: resourceNames.aliases.kb,
index: resourceNames.writeIndexAlias.kb,
query: {
bool: {
filter: [
@ -277,7 +261,7 @@ export class KnowledgeBaseService {
const response = await this.dependencies.esClient.asInternalUser.search<
KnowledgeBaseEntry & { doc_id?: string }
>({
index: resourceNames.aliases.kb,
index: resourceNames.writeIndexAlias.kb,
query: {
bool: {
filter: [
@ -298,10 +282,7 @@ export class KnowledgeBaseService {
},
sort:
sortBy === 'title'
? [
{ ['title.keyword']: { order: sortDirection } },
{ doc_id: { order: sortDirection } }, // sort by doc_id for backwards compatibility
]
? [{ ['title.keyword']: { order: sortDirection } }]
: [{ [String(sortBy)]: { order: sortDirection } }],
size: 500,
_source: {
@ -332,12 +313,28 @@ export class KnowledgeBaseService {
};
} catch (error) {
if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
throwKnowledgeBaseNotReady(error);
}
throw error;
}
};
hasEntries = async () => {
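// size: 0 combined with track_total_hits: 1 and terminate_after: 1 keeps this existence check
// cheap: we only need to know whether at least one entry exists, not the exact count.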
const response = await this.dependencies.esClient.asInternalUser.search<KnowledgeBaseEntry>({
index: resourceNames.writeIndexAlias.kb,
size: 0,
track_total_hits: 1,
terminate_after: 1,
});
const hitCount =
typeof response.hits.total === 'number'
? response.hits.total
: response.hits.total?.value ?? 0;
return hitCount > 0;
};
getPersonalUserInstructionId = async ({
isPublic,
user,
@ -351,7 +348,7 @@ export class KnowledgeBaseService {
return null;
}
const res = await this.dependencies.esClient.asInternalUser.search<KnowledgeBaseEntry>({
index: resourceNames.aliases.kb,
index: resourceNames.writeIndexAlias.kb,
query: {
bool: {
filter: [
@ -399,7 +396,7 @@ export class KnowledgeBaseService {
const response = await this.dependencies.esClient.asInternalUser.search<KnowledgeBaseEntry>({
size: 1,
index: resourceNames.aliases.kb,
index: resourceNames.writeIndexAlias.kb,
query,
_source: false,
});
@ -424,7 +421,7 @@ export class KnowledgeBaseService {
await this.dependencies.esClient.asInternalUser.index<
Omit<KnowledgeBaseEntry, 'id'> & { namespace: string }
>({
index: resourceNames.aliases.kb,
index: resourceNames.writeIndexAlias.kb,
id,
document: {
'@timestamp': new Date().toISOString(),
@ -440,24 +437,27 @@ export class KnowledgeBaseService {
} catch (error) {
this.dependencies.logger.error(`Failed to add entry to knowledge base ${error}`);
if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
throwKnowledgeBaseNotReady(error);
}
if (isSemanticTextUnsupportedError(error)) {
const inferenceId = await getInferenceIdFromWriteIndex(this.dependencies.esClient);
reIndexKnowledgeBaseWithLock({
core: this.dependencies.core,
logger: this.dependencies.logger,
esClient: this.dependencies.esClient,
inferenceId,
}).catch((e) => {
if (e instanceof LockAcquisitionError) {
this.dependencies.logger.debug(`Re-indexing operation is already in progress`);
this.dependencies.logger.info(`Re-indexing operation is already in progress`);
return;
}
this.dependencies.logger.error(`Failed to re-index knowledge base: ${e.message}`);
});
throw serverUnavailable(
`The index "${resourceNames.aliases.kb}" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.`
`The index "${resourceNames.writeIndexAlias.kb}" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.`
);
}
@ -474,7 +474,7 @@ export class KnowledgeBaseService {
deleteEntry = async ({ id }: { id: string }): Promise<void> => {
try {
await this.dependencies.esClient.asInternalUser.delete({
index: resourceNames.aliases.kb,
index: resourceNames.writeIndexAlias.kb,
id,
refresh: 'wait_for',
});
@ -482,25 +482,27 @@ export class KnowledgeBaseService {
return Promise.resolve();
} catch (error) {
if (isInferenceEndpointMissingOrUnavailable(error)) {
throwKnowledgeBaseNotReady(error.body);
throwKnowledgeBaseNotReady(error);
}
throw error;
}
};
getStatus = async () => {
const { enabled, errorMessage, endpoint, modelStats, kbState } = await getKbModelStatus({
getModelStatus = async () => {
return getKbModelStatus({
core: this.dependencies.core,
esClient: this.dependencies.esClient,
logger: this.dependencies.logger,
config: this.dependencies.config,
});
};
return {
enabled,
errorMessage,
endpoint,
modelStats,
kbState,
};
getInferenceEndpointsForEmbedding = async () => {
const { inferenceEndpoints } = await getInferenceEndpointsForEmbedding({
esClient: this.dependencies.esClient,
logger: this.dependencies.logger,
});
return inferenceEndpoints;
};
}

View file

@ -0,0 +1,77 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { errors } from '@elastic/elasticsearch';
import { ElasticsearchClient, Logger } from '@kbn/core/server';
import pRetry from 'p-retry';
import { resourceNames } from '..';
export async function addIndexWriteBlock({
esClient,
index,
}: {
esClient: { asInternalUser: ElasticsearchClient };
index: string;
}) {
await esClient.asInternalUser.indices.addBlock({ index, block: 'write' });
}
export function removeIndexWriteBlock({
esClient,
index,
}: {
esClient: { asInternalUser: ElasticsearchClient };
index: string;
}) {
return esClient.asInternalUser.indices.putSettings({
index,
body: { 'index.blocks.write': false },
});
}
export async function hasIndexWriteBlock({
esClient,
index,
}: {
esClient: { asInternalUser: ElasticsearchClient };
index: string;
}) {
const response = await esClient.asInternalUser.indices.getSettings({ index });
const writeBlockSetting = Object.values(response)[0]?.settings?.index?.blocks?.write;
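// Depending on how the block was applied, ES may report the setting as the string 'true' or as a
// boolean, so both are checked below.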
return writeBlockSetting === 'true' || writeBlockSetting === true;
}
export async function waitForWriteBlockToBeRemoved({
esClient,
logger,
index,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
index: string;
}) {
return pRetry(
async () => {
const isBlocked = await hasIndexWriteBlock({ esClient, index });
if (isBlocked) {
logger.debug(`Waiting for the write block to be removed from "${index}"...`);
throw new Error(
'Waiting for the re-index operation to complete and the write block to be removed...'
);
}
},
{ forever: true, maxTimeout: 10000 }
);
}
export function isKnowledgeBaseIndexWriteBlocked(error: any) {
return (
error instanceof errors.ResponseError &&
error.message.includes(`cluster_block_exception`) &&
error.message.includes(resourceNames.writeIndexAlias.kb)
);
}

View file

@ -0,0 +1,28 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { getNextWriteIndexName } from './reindex_knowledge_base';
describe('getNextWriteIndexName', () => {
it('should return the next write index name', async () => {
expect(getNextWriteIndexName('.kibana-observability-ai-assistant-kb-000008')).toBe(
'.kibana-observability-ai-assistant-kb-000009'
);
});
it('should return empty when input is empty', async () => {
expect(getNextWriteIndexName(undefined)).toBe(undefined);
});
it('should return empty when the sequence number is missing', async () => {
expect(getNextWriteIndexName('.kibana-observability-ai-assistant-kb')).toBe(undefined);
});
it('should return empty when the sequence number is not a number', async () => {
expect(getNextWriteIndexName('.kibana-observability-ai-assistant-kb-foobar')).toBe(undefined);
});
});

View file

@ -5,106 +5,239 @@
* 2.0.
*/
import { errors as EsErrors } from '@elastic/elasticsearch';
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Logger } from '@kbn/logging';
import { last } from 'lodash';
import pRetry from 'p-retry';
import { CoreSetup } from '@kbn/core/server';
import { LockManagerService } from '@kbn/lock-manager';
import { resourceNames } from '..';
import { createKbConcreteIndex } from '../startup_migrations/create_or_update_index_assets';
import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import {
addIndexWriteBlock,
hasIndexWriteBlock,
removeIndexWriteBlock,
} from './index_write_block_utils';
import { createKnowledgeBaseIndex } from './create_knowledge_base_index';
import { updateKnowledgeBaseWriteIndexAlias } from './update_knowledge_base_index_alias';
export const KB_REINDEXING_LOCK_ID = 'observability_ai_assistant:kb_reindexing';
export async function reIndexKnowledgeBaseWithLock({
core,
logger,
esClient,
inferenceId,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
logger: Logger;
esClient: {
asInternalUser: ElasticsearchClient;
};
inferenceId: string;
}): Promise<boolean> {
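// The distributed lock ensures that only one Kibana node re-indexes at a time; concurrent callers
// fail with a LockAcquisitionError, which callers treat as "re-indexing already in progress".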
const lmService = new LockManagerService(core, logger);
return lmService.withLock(KB_REINDEXING_LOCK_ID, () =>
reIndexKnowledgeBase({ logger, esClient })
reIndexKnowledgeBaseWithWriteIndexBlock({
logger: logger.get('kb-reindex'),
esClient,
inferenceId,
})
);
}
async function reIndexKnowledgeBaseWithWriteIndexBlock({
logger,
esClient,
inferenceId,
}: {
logger: Logger;
esClient: { asInternalUser: ElasticsearchClient };
inferenceId: string;
}): Promise<boolean> {
logger.debug('Initializing re-indexing of knowledge base...');
if (await hasIndexWriteBlock({ esClient, index: resourceNames.writeIndexAlias.kb })) {
throw new Error(
`Write block is already set on the knowledge base index: ${resourceNames.writeIndexAlias.kb}`
);
}
try {
await addIndexWriteBlock({ esClient, index: resourceNames.writeIndexAlias.kb });
await reIndexKnowledgeBase({ logger, esClient, inferenceId });
logger.info('Re-indexing knowledge base completed successfully.');
} catch (error) {
logger.error(`Re-indexing knowledge base failed: ${error.message}`);
throw error;
} finally {
await removeIndexWriteBlock({ esClient, index: resourceNames.writeIndexAlias.kb });
}
return true;
}
async function reIndexKnowledgeBase({
logger,
esClient,
inferenceId,
}: {
logger: Logger;
esClient: {
asInternalUser: ElasticsearchClient;
};
}): Promise<boolean> {
logger.debug('Initiating knowledge base re-indexing...');
esClient: { asInternalUser: ElasticsearchClient };
inferenceId: string;
}): Promise<void> {
const activeReindexingTask = await getActiveReindexingTaskId(esClient);
if (activeReindexingTask) {
throw new Error(
`Re-indexing task "${activeReindexingTask}" is already in progress for the knowledge base index: ${resourceNames.writeIndexAlias.kb}`
);
}
try {
const originalIndex = resourceNames.concreteIndexName.kb;
const tempIndex = `${resourceNames.aliases.kb}-000002`;
const { currentWriteIndexName, nextWriteIndexName } = await getCurrentAndNextWriteIndexNames({
esClient,
logger,
});
// Create temporary index
logger.debug(`Creating temporary index "${tempIndex}"...`);
await esClient.asInternalUser.indices.delete({ index: tempIndex }, { ignore: [404] });
await esClient.asInternalUser.indices.create({ index: tempIndex });
await createKnowledgeBaseIndex({ esClient, logger, inferenceId, indexName: nextWriteIndexName });
// Perform reindex to temporary index
logger.debug(`Re-indexing knowledge base to temporary index "${tempIndex}"...`);
await esClient.asInternalUser.reindex({
source: { index: originalIndex },
dest: { index: tempIndex },
refresh: true,
wait_for_completion: true,
});
logger.info(
`Re-indexing knowledge base from "${currentWriteIndexName}" to index "${nextWriteIndexName}"...`
);
// Delete and re-create original index
logger.debug(`Deleting original index "${originalIndex}" and re-creating it...`);
await esClient.asInternalUser.indices.delete({ index: originalIndex });
await createKbConcreteIndex({ logger, esClient });
const reindexResponse = await esClient.asInternalUser.reindex({
source: { index: currentWriteIndexName },
dest: { index: nextWriteIndexName },
refresh: true,
wait_for_completion: false,
});
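// wait_for_completion: false runs the reindex as an ES task, which presumably avoids request
// timeouts for large knowledge bases; the task is polled further down until it completes.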
// Perform reindex back to original index
logger.debug(`Re-indexing knowledge base back to original index "${originalIndex}"...`);
await esClient.asInternalUser.reindex({
source: { index: tempIndex },
dest: { index: originalIndex },
refresh: true,
wait_for_completion: true,
});
// Point write index alias to the new index
await updateKnowledgeBaseWriteIndexAlias({
esClient,
logger,
nextWriteIndexName,
currentWriteIndexName,
});
// Delete temporary index
logger.debug(`Deleting temporary index "${tempIndex}"...`);
await esClient.asInternalUser.indices.delete({ index: tempIndex });
const taskId = reindexResponse.task?.toString();
if (taskId) {
await waitForReIndexTaskToComplete({ esClient, taskId, logger });
} else {
throw new Error(`ID for re-indexing task was not found`);
}
logger.info('Re-indexing knowledge base completed successfully');
return true;
} catch (error) {
logger.error(`Failed to re-index knowledge base: ${error.message}`);
throw new Error(`Failed to re-index knowledge base: ${error.message}`);
// Delete original index
logger.debug(`Deleting write index "${currentWriteIndexName}"`);
await esClient.asInternalUser.indices.delete({ index: currentWriteIndexName });
}
async function getCurrentWriteIndexName(esClient: { asInternalUser: ElasticsearchClient }) {
const response = await esClient.asInternalUser.indices.getAlias(
{ name: resourceNames.writeIndexAlias.kb },
{ ignore: [404] }
);
const currentWriteIndexName = Object.entries(response).find(
([, aliasInfo]) => aliasInfo.aliases[resourceNames.writeIndexAlias.kb]?.is_write_index
)?.[0];
return currentWriteIndexName;
}
export function getNextWriteIndexName(currentWriteIndexName: string | undefined) {
if (!currentWriteIndexName) {
return;
}
const latestIndexNumber = last(currentWriteIndexName.split('-'));
if (!latestIndexNumber) {
return;
}
// sequence number must be a six digit zero padded number like 000008 or 002201
const isSequenceNumberValid = /^\d{6}$/.test(latestIndexNumber);
if (!isSequenceNumberValid) {
return;
}
const nextIndexSequenceNumber = (parseInt(latestIndexNumber, 10) + 1).toString().padStart(6, '0');
return `${resourceNames.writeIndexAlias.kb}-${nextIndexSequenceNumber}`;
}
async function getCurrentAndNextWriteIndexNames({
esClient,
logger,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
}) {
const currentWriteIndexName = await getCurrentWriteIndexName(esClient);
const nextWriteIndexName = getNextWriteIndexName(currentWriteIndexName);
if (!currentWriteIndexName || !nextWriteIndexName) {
throw new Error(
`"${currentWriteIndexName}" is not a valid write index name. Skipping re-indexing of knowledge base.`
);
}
return { currentWriteIndexName, nextWriteIndexName };
}
export async function getActiveReindexingTaskId(esClient: { asInternalUser: ElasticsearchClient }) {
const response = await esClient.asInternalUser.tasks.list({
detailed: true,
actions: ['indices:data/write/reindex'],
});
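// Concrete KB indices are prefixed with the write index alias name (e.g.
// ".kibana-observability-ai-assistant-kb-000008"), so matching the alias against the task
// description identifies re-index tasks that touch the knowledge base.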
for (const node of Object.values(response.nodes ?? {})) {
for (const [taskId, task] of Object.entries(node.tasks)) {
if (task.description?.includes(resourceNames.writeIndexAlias.kb)) {
return taskId;
}
}
}
}
export function isKnowledgeBaseIndexWriteBlocked(error: any) {
return (
error instanceof EsErrors.ResponseError &&
error.message.includes(
`cluster_block_exception: index [${resourceNames.concreteIndexName.kb}] blocked`
)
async function waitForReIndexTaskToComplete({
esClient,
taskId,
logger,
}: {
esClient: { asInternalUser: ElasticsearchClient };
taskId: string;
logger: Logger;
}): Promise<void> {
return pRetry(
async () => {
const taskResponse = await esClient.asInternalUser.tasks.get({
task_id: taskId,
wait_for_completion: false,
});
if (!taskResponse.completed) {
logger.debug(`Waiting for re-indexing task "${taskId}" to complete...`);
throw new Error(`Waiting for re-indexing task "${taskId}" to complete...`);
}
},
{ forever: true, maxTimeout: 10000 }
);
}
export function isSemanticTextUnsupportedError(error: Error) {
const semanticTextUnsupportedError =
'The [sparse_vector] field type is not supported on indices created on versions 8.0 to 8.10';
export async function isReIndexInProgress({
esClient,
logger,
core,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
}) {
const lmService = new LockManagerService(core, logger);
const isSemanticTextUnspported =
error instanceof EsErrors.ResponseError &&
(error.message.includes(semanticTextUnsupportedError) ||
// @ts-expect-error
error.meta?.body?.error?.caused_by?.reason.includes(semanticTextUnsupportedError));
const [lock, activeReindexingTask] = await Promise.all([
lmService.getLock(KB_REINDEXING_LOCK_ID),
getActiveReindexingTaskId(esClient),
]);
return isSemanticTextUnspported;
logger.debug(`Lock: ${!!lock}`);
logger.debug(`ES re-indexing task: ${!!activeReindexingTask}`);
return lock !== undefined || activeReindexingTask !== undefined;
}

View file

@ -0,0 +1,33 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Logger } from '@kbn/logging';
import { resourceNames } from '..';
export async function updateKnowledgeBaseWriteIndexAlias({
esClient,
logger,
nextWriteIndexName,
currentWriteIndexName,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
nextWriteIndexName: string;
currentWriteIndexName: string;
}) {
logger.debug(
`Updating write index alias from "${currentWriteIndexName}" to "${nextWriteIndexName}"`
);
const alias = resourceNames.writeIndexAlias.kb;
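// The remove/add actions below are applied atomically within a single updateAliases request, so
// the alias always points at exactly one write index during the swap.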
await esClient.asInternalUser.indices.updateAliases({
actions: [
{ remove: { index: currentWriteIndexName, alias } },
{ add: { index: nextWriteIndexName, alias, is_write_index: true } },
],
});
}

View file

@ -1,146 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { createConcreteWriteIndex, getDataStreamAdapter } from '@kbn/alerting-plugin/server';
import type { CoreSetup, ElasticsearchClient, Logger } from '@kbn/core/server';
import type { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { conversationComponentTemplate } from '../conversation_component_template';
import { kbComponentTemplate } from '../kb_component_template';
import { resourceNames } from '..';
export async function updateExistingIndexAssets({
logger,
core,
}: {
logger: Logger;
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
}) {
const [coreStart] = await core.getStartServices();
const { asInternalUser } = coreStart.elasticsearch.client;
const hasKbIndex = await asInternalUser.indices.exists({
index: resourceNames.aliases.kb,
});
const hasConversationIndex = await asInternalUser.indices.exists({
index: resourceNames.aliases.conversations,
});
if (!hasKbIndex && !hasConversationIndex) {
logger.warn('Index assets do not exist. Aborting updating index assets');
return;
}
await createOrUpdateIndexAssets({ logger, core });
}
export async function createOrUpdateIndexAssets({
logger,
core,
}: {
logger: Logger;
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
}) {
try {
logger.debug('Setting up index assets');
const [coreStart] = await core.getStartServices();
const { asInternalUser } = coreStart.elasticsearch.client;
// Conversations: component template
await asInternalUser.cluster.putComponentTemplate({
create: false,
name: resourceNames.componentTemplate.conversations,
template: conversationComponentTemplate,
});
// Conversations: index template
await asInternalUser.indices.putIndexTemplate({
name: resourceNames.indexTemplate.conversations,
composed_of: [resourceNames.componentTemplate.conversations],
create: false,
index_patterns: [resourceNames.indexPatterns.conversations],
template: {
settings: {
number_of_shards: 1,
auto_expand_replicas: '0-1',
hidden: true,
},
},
});
// Conversations: write index
const conversationAliasName = resourceNames.aliases.conversations;
await createConcreteWriteIndex({
esClient: asInternalUser,
logger,
totalFieldsLimit: 10000,
indexPatterns: {
alias: conversationAliasName,
pattern: `${conversationAliasName}*`,
basePattern: `${conversationAliasName}*`,
name: resourceNames.concreteIndexName.conversations,
template: resourceNames.indexTemplate.conversations,
},
dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }),
});
// Knowledge base: component template
await asInternalUser.cluster.putComponentTemplate({
create: false,
name: resourceNames.componentTemplate.kb,
template: kbComponentTemplate,
});
// Knowledge base: index template
await asInternalUser.indices.putIndexTemplate({
name: resourceNames.indexTemplate.kb,
composed_of: [resourceNames.componentTemplate.kb],
create: false,
index_patterns: [resourceNames.indexPatterns.kb],
template: {
settings: {
number_of_shards: 1,
auto_expand_replicas: '0-1',
hidden: true,
},
},
});
// Knowledge base: write index
await createKbConcreteIndex({ logger, esClient: coreStart.elasticsearch.client });
logger.info('Successfully set up index assets');
} catch (error) {
logger.error(`Failed setting up index assets: ${error.message}`);
logger.debug(error);
}
}
export async function createKbConcreteIndex({
logger,
esClient,
}: {
logger: Logger;
esClient: {
asInternalUser: ElasticsearchClient;
};
}) {
const kbAliasName = resourceNames.aliases.kb;
return createConcreteWriteIndex({
esClient: esClient.asInternalUser,
logger,
totalFieldsLimit: 10000,
indexPatterns: {
alias: kbAliasName,
pattern: `${kbAliasName}*`,
basePattern: `${kbAliasName}*`,
name: resourceNames.concreteIndexName.kb,
template: resourceNames.indexTemplate.kb,
},
dataStreamAdapter: getDataStreamAdapter({ useDataStreamForAlerts: false }),
});
}

View file

@ -1,183 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import pLimit from 'p-limit';
import type { CoreSetup, Logger } from '@kbn/core/server';
import { uniq } from 'lodash';
import pRetry from 'p-retry';
import { LockAcquisitionError, LockManagerService } from '@kbn/lock-manager';
import { KnowledgeBaseEntry } from '../../../common';
import { resourceNames } from '..';
import { waitForKbModel } from '../inference_endpoint';
import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { ObservabilityAIAssistantConfig } from '../../config';
import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base';
const PLUGIN_STARTUP_LOCK_ID = 'observability_ai_assistant:startup_migrations';
// This function populates the `semantic_text` field for knowledge base entries during the plugin's startup process.
// It ensures all missing fields are updated in batches and uses a distributed lock to prevent conflicts in distributed environments.
// If the knowledge base index does not support the `semantic_text` field, it is re-indexed.
export async function populateMissingSemanticTextFieldMigration({
core,
logger,
config,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
logger: Logger;
config: ObservabilityAIAssistantConfig;
}) {
const [coreStart] = await core.getStartServices();
const esClient = coreStart.elasticsearch.client;
const lmService = new LockManagerService(core, logger);
await lmService
.withLock(PLUGIN_STARTUP_LOCK_ID, async () => {
const hasKbIndex = await esClient.asInternalUser.indices.exists({
index: resourceNames.aliases.kb,
});
if (!hasKbIndex) {
logger.warn('Knowledge base index does not exist. Aborting updating index assets');
return;
}
const isKbSemanticTextCompatible = await isKnowledgeBaseSemanticTextCompatible({
logger,
esClient,
});
if (!isKbSemanticTextCompatible) {
await reIndexKnowledgeBaseWithLock({ core, logger, esClient });
}
await pRetry(
async () => populateMissingSemanticTextFieldRecursively({ esClient, logger, config }),
{ retries: 5, minTimeout: 10_000 }
);
})
.catch((error) => {
if (!(error instanceof LockAcquisitionError)) {
throw error;
}
});
}
// Ensures that every doc has populated the `semantic_text` field.
// It retrieves entries without the field, updates them in batches, and continues until no entries remain.
async function populateMissingSemanticTextFieldRecursively({
esClient,
logger,
config,
}: {
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
config: ObservabilityAIAssistantConfig;
}) {
logger.debug(
'Checking for remaining entries without semantic_text field that need to be migrated'
);
const response = await esClient.asInternalUser.search<KnowledgeBaseEntry>({
size: 100,
track_total_hits: true,
index: [resourceNames.aliases.kb],
query: {
bool: {
must_not: {
exists: {
field: 'semantic_text',
},
},
},
},
_source: {
excludes: ['ml.tokens'],
},
});
if (response.hits.hits.length === 0) {
logger.debug('No remaining entries to migrate');
return;
}
await waitForKbModel({ esClient, logger, config });
const indicesWithOutdatedEntries = uniq(response.hits.hits.map((hit) => hit._index));
logger.debug(
`Found ${response.hits.hits.length} entries without semantic_text field in "${indicesWithOutdatedEntries}". Updating now...`
);
// Limit the number of concurrent requests to avoid overloading the cluster
const limiter = pLimit(20);
const promises = response.hits.hits.map((hit) => {
return limiter(() => {
if (!hit._source || !hit._id) {
return;
}
return esClient.asInternalUser.update({
refresh: 'wait_for',
index: resourceNames.aliases.kb,
id: hit._id,
doc: {
...hit._source,
semantic_text: hit._source.text ?? 'No text',
},
});
});
});
await Promise.all(promises);
logger.debug(`Updated ${promises.length} entries`);
await sleep(100);
await populateMissingSemanticTextFieldRecursively({ esClient, logger, config });
}
async function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
// Checks if the knowledge base index supports `semantic_text`
// If the index was created before version 8.11, it requires re-indexing to support the `semantic_text` field.
async function isKnowledgeBaseSemanticTextCompatible({
logger,
esClient,
}: {
logger: Logger;
esClient: { asInternalUser: ElasticsearchClient };
}): Promise<boolean> {
const indexSettingsResponse = await esClient.asInternalUser.indices.getSettings({
index: resourceNames.aliases.kb,
});
const results = Object.entries(indexSettingsResponse);
if (results.length === 0) {
logger.debug('No knowledge base indices found. Skipping re-indexing.');
return true;
}
const [indexName, { settings }] = results[0];
const createdVersion = parseInt(settings?.index?.version?.created ?? '', 10);
// Check if the index was created before version 8.11
const versionThreshold = 8110000; // Version 8.11.0
if (createdVersion >= versionThreshold) {
logger.debug(
`Knowledge base index "${indexName}" was created in version ${createdVersion}, and does not require re-indexing. Semantic text field is already supported. Aborting`
);
return true;
}
logger.info(
`Knowledge base index was created in ${createdVersion} and must be re-indexed in order to support semantic_text field. Re-indexing now...`
);
return false;
}

View file

@ -0,0 +1,112 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import pLimit from 'p-limit';
import type { CoreSetup, Logger } from '@kbn/core/server';
import { uniq } from 'lodash';
import { LockManagerService } from '@kbn/lock-manager';
import { KnowledgeBaseEntry } from '../../../common';
import { resourceNames } from '..';
import { waitForKbModel } from '../inference_endpoint';
import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { ObservabilityAIAssistantConfig } from '../../config';
import { sleep } from '../util/sleep';
import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index';
const POPULATE_MISSING_SEMANTIC_TEXT_FIELDS_LOCK_ID = 'populate_missing_semantic_text_fields';
export async function populateMissingSemanticTextFieldWithLock({
core,
logger,
config,
esClient,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
logger: Logger;
config: ObservabilityAIAssistantConfig;
esClient: { asInternalUser: ElasticsearchClient };
}) {
const lmService = new LockManagerService(core, logger);
await lmService.withLock(POPULATE_MISSING_SEMANTIC_TEXT_FIELDS_LOCK_ID, async () =>
populateMissingSemanticTextFieldRecursively({ core, esClient, logger, config })
);
}
// Ensures that every doc has populated the `semantic_text` field.
// It retrieves entries without the field, updates them in batches, and continues until no entries remain.
async function populateMissingSemanticTextFieldRecursively({
core,
esClient,
logger,
config,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
esClient: { asInternalUser: ElasticsearchClient };
logger: Logger;
config: ObservabilityAIAssistantConfig;
}) {
logger.debug(
'Checking for remaining entries without semantic_text field that need to be migrated'
);
const response = await esClient.asInternalUser.search<KnowledgeBaseEntry>({
size: 100,
track_total_hits: true,
index: [resourceNames.writeIndexAlias.kb],
query: {
bool: {
must_not: {
exists: {
field: 'semantic_text',
},
},
},
},
_source: {
excludes: ['ml.tokens'],
},
});
if (response.hits.hits.length === 0) {
logger.debug('No remaining entries to migrate');
return;
}
const inferenceId = await getInferenceIdFromWriteIndex(esClient);
await waitForKbModel({ core, esClient, logger, config, inferenceId });
const indicesWithOutdatedEntries = uniq(response.hits.hits.map((hit) => hit._index));
logger.debug(
`Found ${response.hits.hits.length} entries without semantic_text field in "${indicesWithOutdatedEntries}". Updating now...`
);
// Limit the number of concurrent requests to avoid overloading the cluster
const limiter = pLimit(20);
const promises = response.hits.hits.map((hit) => {
return limiter(() => {
if (!hit._source || !hit._id) {
return;
}
return esClient.asInternalUser.update({
refresh: 'wait_for',
index: resourceNames.writeIndexAlias.kb,
id: hit._id,
doc: {
...hit._source,
semantic_text: hit._source.text ?? 'No text',
},
});
});
});
await Promise.all(promises);
logger.debug(`Updated ${promises.length} entries`);
await sleep(100);
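// Recurse until the search above finds no more entries without semantic_text; each pass updates
// up to 100 entries.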
await populateMissingSemanticTextFieldRecursively({ core, esClient, logger, config });
}

View file

@ -0,0 +1,134 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import type { CoreSetup, Logger } from '@kbn/core/server';
import pRetry from 'p-retry';
import { errors } from '@elastic/elasticsearch';
import { LockAcquisitionError, LockManagerService } from '@kbn/lock-manager';
import { resourceNames } from '..';
import { ObservabilityAIAssistantPluginStartDependencies } from '../../types';
import { ObservabilityAIAssistantConfig } from '../../config';
import { reIndexKnowledgeBaseWithLock } from '../knowledge_base_service/reindex_knowledge_base';
import { populateMissingSemanticTextFieldWithLock } from './populate_missing_semantic_text_fields';
import { hasKbWriteIndex } from '../knowledge_base_service/has_kb_index';
import { getInferenceIdFromWriteIndex } from '../knowledge_base_service/get_inference_id_from_write_index';
import { updateExistingIndexAssets } from '../index_assets/update_existing_index_assets';
const PLUGIN_STARTUP_LOCK_ID = 'observability_ai_assistant:startup_migrations';
// This function performs necessary startup migrations for the observability AI assistant:
// 1. Updates index assets to ensure mappings are correct
// 2. If the knowledge base index does not support the `semantic_text` field, it is re-indexed.
// 3. Populates the `semantic_text` field for knowledge base entries
export async function runStartupMigrations({
core,
logger,
config,
}: {
core: CoreSetup<ObservabilityAIAssistantPluginStartDependencies>;
logger: Logger;
config: ObservabilityAIAssistantConfig;
}) {
// update index assets to ensure mappings are correct
await updateExistingIndexAssets({ logger, core });
const [coreStart] = await core.getStartServices();
const esClient = coreStart.elasticsearch.client;
const lmService = new LockManagerService(core, logger);
await lmService
.withLock(PLUGIN_STARTUP_LOCK_ID, async () => {
const doesKbIndexExist = await hasKbWriteIndex({ esClient });
if (!doesKbIndexExist) {
logger.info('Knowledge base index does not exist. Skipping knowledge base migrations');
return;
}
const isKbSemanticTextCompatible = await isKnowledgeBaseSemanticTextCompatible({
logger,
esClient,
});
if (!isKbSemanticTextCompatible) {
const inferenceId = await getInferenceIdFromWriteIndex(esClient);
await reIndexKnowledgeBaseWithLock({ core, logger, esClient, inferenceId });
}
await pRetry(
async () => populateMissingSemanticTextFieldWithLock({ core, logger, config, esClient }),
{
retries: 5,
minTimeout: 10_000,
onFailedAttempt: async (error) => {
const isLockAcquisitionError = error instanceof LockAcquisitionError;
if (!isLockAcquisitionError) {
throw error;
}
},
}
);
})
.catch((error) => {
const isLockAcquisitionError = error instanceof LockAcquisitionError;
if (!isLockAcquisitionError) {
throw error;
}
});
}
// Checks if the knowledge base index supports `semantic_text`
// If the index was created before version 8.11, it requires re-indexing to support the `semantic_text` field.
async function isKnowledgeBaseSemanticTextCompatible({
logger,
esClient,
}: {
logger: Logger;
esClient: { asInternalUser: ElasticsearchClient };
}): Promise<boolean> {
const indexSettingsResponse = await esClient.asInternalUser.indices.getSettings({
index: resourceNames.writeIndexAlias.kb,
});
const results = Object.entries(indexSettingsResponse);
if (results.length === 0) {
logger.debug('No knowledge base indices found. Skipping re-indexing.');
return true;
}
const [indexName, { settings }] = results[0];
const createdVersion = parseInt(settings?.index?.version?.created ?? '', 10);
// Check if the index was created before version 8.11
const versionThreshold = 8110000; // Version 8.11.0
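// index.version.created encodes the version numerically (roughly major/minor/patch digits), so
// any value below 8110000 indicates an index created before 8.11.0.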
if (createdVersion >= versionThreshold) {
logger.debug(
`Knowledge base index "${indexName}" was created in version ${createdVersion}, and does not require re-indexing. Semantic text field is already supported. Aborting`
);
return true;
}
logger.info(
`Knowledge base index was created in version ${createdVersion} and must be re-indexed to support the semantic_text field. Re-indexing now...`
);
return false;
}
export function isSemanticTextUnsupportedError(error: Error) {
const semanticTextUnsupportedError =
'The [sparse_vector] field type is not supported on indices created on versions 8.0 to 8.10';
const isSemanticTextUnsupported =
error instanceof errors.ResponseError &&
(error.message.includes(semanticTextUnsupportedError) ||
// @ts-expect-error
error.meta?.body?.error?.caused_by?.reason?.includes(semanticTextUnsupportedError));
return isSemanticTextUnsupported;
}

View file

@ -0,0 +1,10 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export async function sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}

View file

@ -51,6 +51,7 @@ export async function recallAndScore({
);
if (!suggestions.length) {
logger.debug('No suggestions found during recall');
return {
relevantDocuments: [],
llmScores: [],
@ -58,6 +59,8 @@ export async function recallAndScore({
};
}
logger.debug(`Found ${suggestions.length} suggestions during recall`);
try {
const { llmScores, relevantDocuments } = await scoreSuggestions({
suggestions,
@ -70,6 +73,10 @@ export async function recallAndScore({
chat,
});
logger.debug(
`Found ${relevantDocuments.length} relevant documents out of ${suggestions.length} suggestions`
);
analytics.reportEvent<RecallRanking>(recallRankingEventType, {
scoredDocuments: suggestions.map((suggestion) => {
const llmScore = llmScores.find((score) => score.id === suggestion.id);

View file

@ -16,10 +16,12 @@ import { parseSuggestionScores } from './parse_suggestion_scores';
import { RecalledSuggestion } from './recall_and_score';
import { ShortIdTable } from '../../../common/utils/short_id_table';
export const SCORE_FUNCTION_NAME = 'score';
const scoreFunctionRequestRt = t.type({
message: t.type({
function_call: t.type({
name: t.literal('score'),
name: t.literal(SCORE_FUNCTION_NAME),
arguments: t.string,
}),
}),
@ -91,7 +93,7 @@ export async function scoreSuggestions({
};
const scoreFunction = {
name: 'score',
name: SCORE_FUNCTION_NAME,
description:
'Use this function to score documents based on how relevant they are to the conversation.',
parameters: {
@ -115,7 +117,7 @@ export async function scoreSuggestions({
chat('score_suggestions', {
messages: [...messages.slice(0, -2), newUserMessage],
functions: [scoreFunction],
functionCall: 'score',
functionCall: SCORE_FUNCTION_NAME,
signal,
stream: true,
}).pipe(concatenateChatCompletionChunks())

View file

@ -54,6 +54,7 @@
"@kbn/core-http-browser",
"@kbn/sse-utils",
"@kbn/core-security-server",
"@kbn/ml-trained-models-utils",
"@kbn/lock-manager"
],
"exclude": ["target/**/*"]

View file

@ -1,2 +0,0 @@
# unzipped snapshot folder
knowledge_base/snapshot_kb_8.10/

View file

@ -72,7 +72,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
// Fails on ECH: https://github.com/elastic/kibana/issues/219203
it.skip('returns a 200 if the connector exists', async () => {
void proxy.interceptConversation('Hello from LLM Proxy');
void proxy.interceptWithResponse('Hello from LLM Proxy');
const { status } = await observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/chat',
params: {
@ -92,7 +92,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
// Fails on ECH: https://github.com/elastic/kibana/issues/219203
it.skip('should forward the system message to the LLM', async () => {
const simulatorPromise = proxy.interceptConversation('Hello from LLM Proxy');
const simulatorPromise = proxy.interceptWithResponse('Hello from LLM Proxy');
await observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/chat',
params: {
@ -130,7 +130,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
new Promise<void>((resolve, reject) => {
async function runTest() {
const chunks = times(NUM_RESPONSES).map((i) => `Part: ${i}\n`);
void proxy.interceptConversation(chunks);
void proxy.interceptWithResponse(chunks);
const receivedChunks: Array<Record<string, any>> = [];

View file

@ -102,7 +102,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
withInternalHeaders: true,
});
proxy.interceptConversation('Hello!').catch((e) => {
proxy.interceptWithResponse('Hello!').catch((e) => {
log.error(`Failed to intercept conversation ${e}`);
});
@ -223,7 +223,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
it('forwards the system message as the first message in the request to the LLM with message role "system"', async () => {
const simulatorPromise = proxy.interceptConversation('Hello from LLM Proxy');
const simulatorPromise = proxy.interceptWithResponse('Hello from LLM Proxy');
await observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/chat/complete',
params: {
@ -251,7 +251,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
before(async () => {
void proxy.interceptTitle('Title for a new conversation');
void proxy.interceptConversation('Hello again');
void proxy.interceptWithResponse('Hello again');
const allEvents = await getEvents({});
events = allEvents.filter(
@ -387,7 +387,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
throw new Error('Failed to intercept conversation title', e);
});
proxy.interceptConversation('Good night, sir!').catch((e) => {
proxy.interceptWithResponse('Good night, sir!').catch((e) => {
throw new Error('Failed to intercept conversation ', e);
});
@ -420,7 +420,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
},
});
proxy.interceptConversation('Good night, sir!').catch((e) => {
proxy.interceptWithResponse('Good night, sir!').catch((e) => {
log.error(`Failed to intercept conversation ${e}`);
});

View file

@ -101,7 +101,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
},
});
void proxy.interceptConversation('Hello from LLM Proxy');
void proxy.interceptWithResponse('Hello from LLM Proxy');
const alertsResponseBody = await invokeChatCompleteWithFunctionRequest({
connectorId,

View file

@ -22,13 +22,13 @@ import {
createLlmProxy,
} from '../../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context';
import {
addSampleDocsToInternalKb,
clearKnowledgeBase,
deleteKnowledgeBaseModel,
setupKnowledgeBase,
} from '../../utils/knowledge_base';
import { addSampleDocsToInternalKb, clearKnowledgeBase } from '../../utils/knowledge_base';
import { chatComplete } from '../../utils/conversation';
import {
deployTinyElserAndSetupKb,
teardownTinyElserModelAndInferenceEndpoint,
} from '../../utils/model_and_inference';
import { restoreIndexAssets } from '../../utils/index_assets';
const screenContexts = [
{
@ -84,12 +84,13 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({
port: llmProxy.getPort(),
});
await setupKnowledgeBase(getService);
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
await deployTinyElserAndSetupKb(getService);
await addSampleDocsToInternalKb(getService, sampleDocsForInternalKb);
({ getDocuments } = llmProxy.interceptScoreToolChoice(log));
void llmProxy.interceptConversation('Your favourite color is blue.');
void llmProxy.interceptWithResponse('Your favourite color is blue.');
({ messageAddedEvents } = await chatComplete({
userPrompt,
@ -107,7 +108,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
actionId: connectorId,
});
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await clearKnowledgeBase(es);
});
@ -158,7 +159,6 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
const extractedDocs = await getDocuments();
const expectedTexts = sampleDocsForInternalKb.map((doc) => doc.text).sort();
const actualTexts = extractedDocs.map((doc) => doc.text).sort();
expect(actualTexts).to.eql(expectedTexts);
});
});

View file

@ -41,7 +41,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
// intercept the LLM request and return a fixed response
void proxy.interceptConversation('Hello from LLM Proxy');
void proxy.interceptWithResponse('Hello from LLM Proxy');
await generateApmData(apmSynthtraceEsClient);

View file

@ -86,7 +86,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
}),
});
void llmProxy.interceptConversation('Hello from user');
void llmProxy.interceptWithResponse('Hello from user');
({ messageAddedEvents } = await chatComplete({
userPrompt: 'Please retrieve the most recent Apache log messages',

View file

@ -82,7 +82,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
arguments: () => JSON.stringify({ start: 'now-10d', end: 'now' }),
});
void llmProxy.interceptConversation(
void llmProxy.interceptWithResponse(
`You have active alerts for the past 10 days. Back to work!`
);

View file

@ -70,7 +70,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
({ getRelevantFields } = llmProxy.interceptSelectRelevantFieldsToolChoice());
void llmProxy.interceptConversation(`Yes, you do have logs. Congratulations! 🎈️🎈️🎈️`);
void llmProxy.interceptWithResponse(`Yes, you do have logs. Congratulations! 🎈️🎈️🎈️`);
({ messageAddedEvents } = await chatComplete({
userPrompt: USER_MESSAGE,

View file

@ -10,12 +10,14 @@ import { first, uniq } from 'lodash';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context';
import {
clearKnowledgeBase,
deleteKnowledgeBaseModel,
addSampleDocsToInternalKb,
addSampleDocsToCustomIndex,
setupKnowledgeBase,
} from '../../utils/knowledge_base';
import { animalSampleDocs, technicalSampleDocs } from '../../utils/sample_docs';
import {
deployTinyElserAndSetupKb,
teardownTinyElserModelAndInferenceEndpoint,
} from '../../utils/model_and_inference';
const customSearchConnectorIndex = 'animals_kb';
@ -25,13 +27,13 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
describe('recall', function () {
before(async () => {
await setupKnowledgeBase(getService);
await deployTinyElserAndSetupKb(getService);
await addSampleDocsToInternalKb(getService, technicalSampleDocs);
await addSampleDocsToCustomIndex(getService, animalSampleDocs, customSearchConnectorIndex);
});
after(async () => {
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await clearKnowledgeBase(es);
// clear custom index
await es.indices.delete({ index: customSearchConnectorIndex }, { ignore: [404] });

View file

@ -39,7 +39,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({
port: llmProxy.getPort(),
});
void llmProxy.interceptConversation('Hello from LLM Proxy');
void llmProxy.interceptWithResponse('Hello from LLM Proxy');
await chatComplete({
userPrompt: USER_PROMPT,
@ -106,7 +106,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
when: () => true,
});
void llmProxy.interceptConversation('Hello from LLM Proxy');
void llmProxy.interceptWithResponse('Hello from LLM Proxy');
({ messageAddedEvents } = await chatComplete({
userPrompt: USER_PROMPT,

View file

@ -14,10 +14,10 @@ import {
import type { DeploymentAgnosticFtrProviderContext } from '../../../../../ftr_provider_context';
import { invokeChatCompleteWithFunctionRequest } from '../../utils/conversation';
import {
clearKnowledgeBase,
deleteKnowledgeBaseModel,
setupKnowledgeBase,
} from '../../utils/knowledge_base';
deployTinyElserAndSetupKb,
teardownTinyElserModelAndInferenceEndpoint,
} from '../../utils/model_and_inference';
import { clearKnowledgeBase, getKnowledgeBaseEntriesFromApi } from '../../utils/knowledge_base';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const log = getService('log');
@ -31,15 +31,15 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
let connectorId: string;
before(async () => {
await setupKnowledgeBase(getService);
await deployTinyElserAndSetupKb(getService);
proxy = await createLlmProxy(log);
connectorId = await observabilityAIAssistantAPIClient.createProxyActionConnector({
port: proxy.getPort(),
});
// intercept the LLM request and return a fixed response
void proxy.interceptConversation('Hello from LLM Proxy');
void proxy.interceptWithResponse('Hello from LLM Proxy');
await invokeChatCompleteWithFunctionRequest({
connectorId,
@ -61,26 +61,17 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
after(async () => {
proxy.close();
proxy?.close();
await observabilityAIAssistantAPIClient.deleteActionConnector({
actionId: connectorId,
});
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await clearKnowledgeBase(es);
});
it('persists entry in knowledge base', async () => {
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/entries',
params: {
query: {
query: '',
sortBy: 'title',
sortDirection: 'asc',
},
},
});
const res = await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient });
const { role, public: isPublic, text, type, user, title } = res.body.entries[0];

View file

@ -54,7 +54,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
before(async () => {
await clearConversations(es);
const simulatorPromise = llmProxy.interceptTitle(TITLE);
void llmProxy.interceptConversation('The sky is blue because of Rayleigh scattering.');
void llmProxy.interceptWithResponse('The sky is blue because of Rayleigh scattering.');
const res = await chatComplete({
userPrompt: 'Why the sky is blue?',

View file

@ -28,7 +28,7 @@ import {
} from '@kbn/lock-manager/src/setup_lock_manager_index';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import { getLoggerMock } from '../utils/logger';
import { getLoggerMock } from '../utils/kibana_mocks';
import { dateAsTimestamp, durationAsMs, sleep } from '../utils/time';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {

View file

@ -11,36 +11,40 @@ export default function aiAssistantApiIntegrationTests({
loadTestFile,
}: DeploymentAgnosticFtrProviderContext) {
describe('observability AI Assistant', function () {
loadTestFile(require.resolve('./conversations/conversations.spec.ts'));
loadTestFile(require.resolve('./connectors/connectors.spec.ts'));
loadTestFile(require.resolve('./chat/chat.spec.ts'));
loadTestFile(require.resolve('./complete/complete.spec.ts'));
// Functions
loadTestFile(require.resolve('./complete/functions/alerts.spec.ts'));
loadTestFile(require.resolve('./complete/functions/context.spec.ts'));
loadTestFile(require.resolve('./complete/functions/elasticsearch.spec.ts'));
loadTestFile(require.resolve('./complete/functions/execute_query.spec.ts'));
loadTestFile(require.resolve('./complete/functions/get_alerts_dataset_info.spec.ts'));
loadTestFile(require.resolve('./complete/functions/get_dataset_info.spec.ts'));
loadTestFile(require.resolve('./complete/functions/execute_query.spec.ts'));
loadTestFile(require.resolve('./complete/functions/elasticsearch.spec.ts'));
loadTestFile(require.resolve('./complete/functions/recall.spec.ts'));
loadTestFile(require.resolve('./complete/functions/retrieve_elastic_doc.spec.ts'));
loadTestFile(require.resolve('./complete/functions/summarize.spec.ts'));
loadTestFile(require.resolve('./complete/functions/recall.spec.ts'));
loadTestFile(require.resolve('./complete/functions/context.spec.ts'));
loadTestFile(require.resolve('./complete/functions/title_conversation.spec.ts'));
loadTestFile(require.resolve('./public_complete/public_complete.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_setup.spec.ts'));
loadTestFile(
require.resolve(
'./knowledge_base/knowledge_base_reindex_and_populate_missing_semantic_text_fields.spec.ts'
)
);
loadTestFile(
require.resolve(
'./knowledge_base/knowledge_base_reindex_to_fix_sparse_vector_support.spec.ts'
)
);
// knowledge base
loadTestFile(require.resolve('./knowledge_base/knowledge_base_8.10_upgrade_test.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_8.16_upgrade_test.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_8.18_upgrade_test.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_reindex_concurrency.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_setup.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_status.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base_user_instructions.spec.ts'));
loadTestFile(require.resolve('./knowledge_base/knowledge_base.spec.ts'));
loadTestFile(
require.resolve('./knowledge_base/knowledge_base_change_model_from_elser_to_e5.spec.ts')
);
// Misc.
loadTestFile(require.resolve('./chat/chat.spec.ts'));
loadTestFile(require.resolve('./complete/complete.spec.ts'));
loadTestFile(require.resolve('./index_assets/index_assets.spec.ts'));
loadTestFile(require.resolve('./connectors/connectors.spec.ts'));
loadTestFile(require.resolve('./conversations/conversations.spec.ts'));
// public endpoints
loadTestFile(require.resolve('./public_complete/public_complete.spec.ts'));
loadTestFile(require.resolve('./distributed_lock_manager/distributed_lock_manager.spec.ts'));
});
}

View file

@ -33,7 +33,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
}
for (const writeIndexName of Object.values(resourceNames.concreteIndexName)) {
for (const writeIndexName of Object.values(resourceNames.concreteWriteIndexName)) {
it(`should create write index: "${writeIndexName}"`, async () => {
const exists = await es.indices.exists({ index: writeIndexName });
expect(exists).to.be(true);
@ -54,7 +54,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
expect(indices).to.have.length(2);
expect(indices.map(({ index }) => index).sort()).to.eql(
Object.values(resourceNames.concreteIndexName).sort()
Object.values(resourceNames.concreteWriteIndexName).sort()
);
});
});

View file

@ -9,12 +9,11 @@ import expect from '@kbn/expect';
import { type KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common';
import { orderBy, size, toPairs } from 'lodash';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import { clearKnowledgeBase, getKnowledgeBaseEntriesFromEs } from '../utils/knowledge_base';
import {
clearKnowledgeBase,
deleteKnowledgeBaseModel,
getKnowledgeBaseEntries,
setupKnowledgeBase,
} from '../utils/knowledge_base';
teardownTinyElserModelAndInferenceEndpoint,
deployTinyElserAndSetupKb,
} from '../utils/model_and_inference';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const es = getService('es');
@ -47,11 +46,11 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
describe('Knowledge base', function () {
before(async () => {
await setupKnowledgeBase(getService);
await deployTinyElserAndSetupKb(getService);
});
after(async () => {
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await clearKnowledgeBase(es);
});
@ -79,7 +78,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
it('generates sparse embeddings', async () => {
const hits = await getKnowledgeBaseEntries(es);
const hits = await getKnowledgeBaseEntriesFromEs(es);
const embeddings =
hits[0]._source?._inference_fields?.semantic_text?.inference.chunks.semantic_text[0]
.embeddings;
@ -163,16 +162,14 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
describe('when sorting ', () => {
const ascendingOrder = ['my_doc_a', 'my_doc_b', 'my_doc_c'];
it('allows sorting ascending', async () => {
const entries = await getEntries({ sortBy: 'title', sortDirection: 'asc' });
expect(entries.map(({ id }) => id)).to.eql(ascendingOrder);
expect(entries.map(({ id }) => id)).to.eql(['my_doc_a', 'my_doc_b', 'my_doc_c']);
});
it('allows sorting descending', async () => {
const entries = await getEntries({ sortBy: 'title', sortDirection: 'desc' });
expect(entries.map(({ id }) => id)).to.eql([...ascendingOrder].reverse());
expect(entries.map(({ id }) => id)).to.eql(['my_doc_c', 'my_doc_b', 'my_doc_a']);
});
});

View file

@ -0,0 +1,160 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import * as semver from 'semver';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/common';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import { getKbIndexCreatedVersion } from '../utils/knowledge_base';
import {
TINY_ELSER_INFERENCE_ID,
TINY_ELSER_MODEL_ID,
setupTinyElserModelAndInferenceEndpoint,
teardownTinyElserModelAndInferenceEndpoint,
} from '../utils/model_and_inference';
import {
createOrUpdateIndexAssets,
deleteIndexAssets,
restoreIndexAssets,
runStartupMigrations,
} from '../utils/index_assets';
import { restoreKbSnapshot } from '../utils/snapshots';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
const es = getService('es');
const retry = getService('retry');
const log = getService('log');
// Sparse vector field was introduced in Elasticsearch 8.11
// The semantic text field was added to the knowledge base index in 8.17
// Indices created in 8.10 do not support semantic text field and need to be reindexed
describe('when upgrading from 8.10 to 8.18', function () {
// Intentionally skipped in all serverless environments (local and MKI)
// because the migration scenario being tested is not relevant to MKI and Serverless.
this.tags(['skipServerless']);
before(async () => {
// in a real environment we will use the ELSER inference endpoint (`.elser-2-elasticsearch`) which is pre-installed
// For testing purposes we will use the tiny ELSER model
log.info('Setting up tiny ELSER model and inference endpoint');
await setupTinyElserModelAndInferenceEndpoint(getService);
});
after(async () => {
log.info('Restoring index assets');
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
log.info('Tearing down tiny ELSER model and inference endpoint');
await teardownTinyElserModelAndInferenceEndpoint(getService);
});
describe('before running migrations', () => {
before(async () => {
log.info('Delete index assets');
await deleteIndexAssets(es);
log.info('Restoring snapshot');
await restoreKbSnapshot({
log,
es,
snapshotFolderName: 'snapshot_kb_8.10',
snapshotName: 'my_snapshot',
});
log.info('Creating index assets');
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
});
it('has an index created version earlier than 8.11', async () => {
await retry.try(async () => {
const indexCreatedVersion = await getKbIndexCreatedVersion(es);
expect(semver.lt(indexCreatedVersion, '8.11.0')).to.be(true);
});
});
it('cannot add new entries to KB until reindex has completed', async () => {
const res1 = await createKnowledgeBaseEntry();
expect(res1.status).to.be(503);
expect((res1.body as unknown as Error).message).to.eql(
'The index ".kibana-observability-ai-assistant-kb" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.'
);
// wait for reindex to have updated the index
await retry.try(async () => {
const indexCreatedVersion = await getKbIndexCreatedVersion(es);
expect(semver.gte(indexCreatedVersion, '8.18.0')).to.be(true);
});
const res2 = await createKnowledgeBaseEntry();
expect(res2.status).to.be(200);
});
});
describe('after running migrations', () => {
beforeEach(async () => {
await deleteIndexAssets(es);
await restoreKbSnapshot({
log,
es,
snapshotFolderName: 'snapshot_kb_8.10',
snapshotName: 'my_snapshot',
});
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
await runStartupMigrations(observabilityAIAssistantAPIClient);
});
it('has an index created version later than 8.18', async () => {
await retry.try(async () => {
const indexCreatedVersion = await getKbIndexCreatedVersion(es);
expect(semver.gt(indexCreatedVersion, '8.18.0')).to.be(true);
});
});
it('can add new entries', async () => {
const { status } = await createKnowledgeBaseEntry();
expect(status).to.be(200);
});
it('has default ELSER inference endpoint', async () => {
await retry.try(async () => {
const { body } = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(body.endpoint?.inference_id).to.eql(TINY_ELSER_INFERENCE_ID);
expect(body.endpoint?.service_settings.model_id).to.eql(TINY_ELSER_MODEL_ID);
});
});
it('has a deployed model', async () => {
await retry.try(async () => {
const { body } = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(body.kbState === KnowledgeBaseState.READY).to.be(true);
});
});
});
function createKnowledgeBaseEntry() {
const knowledgeBaseEntry = {
id: 'my-doc-id-1',
title: 'My title',
text: 'My content',
};
return observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save',
params: { body: knowledgeBaseEntry },
});
}
});
}

View file

@ -0,0 +1,130 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common';
import { sortBy } from 'lodash';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
getKnowledgeBaseEntriesFromEs,
getKnowledgeBaseEntriesFromApi,
} from '../utils/knowledge_base';
import {
createOrUpdateIndexAssets,
deleteIndexAssets,
restoreIndexAssets,
runStartupMigrations,
} from '../utils/index_assets';
import { restoreKbSnapshot } from '../utils/snapshots';
import {
deployTinyElserAndSetupKb,
teardownTinyElserModelAndInferenceEndpoint,
} from '../utils/model_and_inference';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
const es = getService('es');
const retry = getService('retry');
const log = getService('log');
// In 8.16 and earlier embeddings were stored in the `ml.tokens` field
// In 8.17 `ml.tokens` is replaced with `semantic_text` field and the custom ELSER inference endpoint "obs_ai_assistant_kb_inference" is introduced
// When upgrading we must ensure that the semantic_text field is populated
describe('when upgrading from 8.16 to 8.17', function () {
// Intentionally skipped in all serverless environments (local and MKI)
// because the migration scenario being tested is not relevant to MKI and Serverless.
this.tags(['skipServerless']);
before(async () => {
await teardownTinyElserModelAndInferenceEndpoint(getService);
await deleteIndexAssets(es);
await restoreKbSnapshot({
log,
es,
snapshotFolderName: 'snapshot_kb_8.16',
snapshotName: 'kb_snapshot_8.16',
});
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
await deployTinyElserAndSetupKb(getService);
});
after(async () => {
await teardownTinyElserModelAndInferenceEndpoint(getService);
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
});
describe('before migrating', () => {
it('the docs do not have semantic_text embeddings', async () => {
const hits = await getKnowledgeBaseEntriesFromEs(es);
const hasSemanticTextEmbeddings = hits.some((hit) => hit._source?.semantic_text);
expect(hits.length).to.be(60);
expect(hasSemanticTextEmbeddings).to.be(false);
});
});
describe('after migrating', () => {
before(async () => {
await runStartupMigrations(observabilityAIAssistantAPIClient);
});
it('the docs have semantic_text field', async () => {
await retry.try(async () => {
const hits = await getKnowledgeBaseEntriesFromEs(es);
const hasSemanticTextField = hits.every((hit) => hit._source?.semantic_text);
expect(hits.length).to.be(60);
expect(hasSemanticTextField).to.be(true);
});
});
it('the docs have embeddings', async () => {
await retry.try(async () => {
const hits = await getKnowledgeBaseEntriesFromEs(es);
const hasEmbeddings = hits.every(
(hit) =>
// @ts-expect-error
Object.keys(hit._source?.semantic_text.inference.chunks[0].embeddings).length > 0
);
expect(hits.length).to.be(60);
expect(hasEmbeddings).to.be(true);
});
});
it('returns entries correctly via API', async () => {
const res = await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient });
expect(res.status).to.be(200);
expect(
sortBy(
res.body.entries
.filter(omitLensEntry)
.map(({ title, text, type }) => ({ title, text, type })),
({ title }) => title
)
).to.eql([
{
title: 'movie_quote',
type: 'contextual',
text: 'To infinity and beyond!',
},
{
title: 'user_color',
type: 'contextual',
text: "The user's favourite color is blue.",
},
]);
});
});
});
}
function omitLensEntry(entry?: KnowledgeBaseEntry) {
return entry?.labels?.category !== 'lens';
}

View file

@ -0,0 +1,99 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import { getKbIndexCreatedVersion, getKnowledgeBaseEntriesFromApi } from '../utils/knowledge_base';
import {
createOrUpdateIndexAssets,
deleteIndexAssets,
restoreIndexAssets,
runStartupMigrations,
} from '../utils/index_assets';
import { restoreKbSnapshot } from '../utils/snapshots';
import {
LEGACY_CUSTOM_INFERENCE_ID,
TINY_ELSER_MODEL_ID,
createTinyElserInferenceEndpoint,
deleteInferenceEndpoint,
deleteModel,
importModel,
} from '../utils/model_and_inference';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
const es = getService('es');
const retry = getService('retry');
const log = getService('log');
const ml = getService('ml');
// In 8.18 inference happens via the custom inference endpoint "obs_ai_assistant_kb_inference"
// In 8.19 / 9.1 the custom inference endpoint ("obs_ai_assistant_kb_inference") is replaced with the preconfigured endpoint ".elser-2-elasticsearch"
// We need to make sure that the custom inference endpoint continues to work after the migration
describe('when upgrading from 8.18 to 8.19', function () {
this.tags(['skipServerless']);
before(async () => {
await importModel(ml, { modelId: TINY_ELSER_MODEL_ID });
await createTinyElserInferenceEndpoint(getService, {
inferenceId: LEGACY_CUSTOM_INFERENCE_ID,
});
await deleteIndexAssets(es);
await restoreKbSnapshot({
log,
es,
snapshotFolderName: 'snapshot_kb_8.18',
snapshotName: 'kb_snapshot_8.18',
});
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
await runStartupMigrations(observabilityAIAssistantAPIClient);
});
after(async () => {
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID });
await deleteInferenceEndpoint(getService, { inferenceId: LEGACY_CUSTOM_INFERENCE_ID });
});
it('has an index created in 8.18', async () => {
await retry.try(async () => {
const indexVersion = await getKbIndexCreatedVersion(es);
expect(indexVersion).to.be('8.18.0');
});
});
it('can retrieve entries', async () => {
const res = await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient });
expect(res.status).to.be(200);
expect(res.body.entries).to.have.length(1);
expect(res.body.entries[0].text).to.be(
'The user has a 10 meter tall pet dinosaur. It loves carrots.'
);
});
it('can add new entries to KB', async () => {
const res = await createKnowledgeBaseEntry();
expect(res.status).to.be(200);
});
function createKnowledgeBaseEntry() {
const knowledgeBaseEntry = {
id: 'my-doc-id-1',
title: 'My title',
text: 'My content',
};
return observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save',
params: { body: knowledgeBaseEntry },
});
}
});
}

View file

@ -0,0 +1,174 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { getInferenceIdFromWriteIndex } from '@kbn/observability-ai-assistant-plugin/server/service/knowledge_base_service/get_inference_id_from_write_index';
import { KnowledgeBaseEntry } from '@kbn/observability-ai-assistant-plugin/common';
import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service';
import { isArray, isObject } from 'lodash';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
addSampleDocsToInternalKb,
getConcreteWriteIndexFromAlias,
getKnowledgeBaseEntriesFromApi,
getKnowledgeBaseEntriesFromEs,
setupKnowledgeBase,
waitForKnowledgeBaseIndex,
waitForKnowledgeBaseReady,
} from '../utils/knowledge_base';
import { restoreIndexAssets } from '../utils/index_assets';
import {
TINY_ELSER_INFERENCE_ID,
TINY_ELSER_MODEL_ID,
TINY_TEXT_EMBEDDING_INFERENCE_ID,
TINY_TEXT_EMBEDDING_MODEL_ID,
createTinyElserInferenceEndpoint,
createTinyTextEmbeddingInferenceEndpoint,
deleteInferenceEndpoint,
deleteModel,
importModel,
} from '../utils/model_and_inference';
import { animalSampleDocs } from '../utils/sample_docs';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const es = getService('es');
const ml = getService('ml');
const log = getService('log');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
type KnowledgeBaseEsEntry = Awaited<ReturnType<typeof getKnowledgeBaseEntriesFromEs>>[0];
describe('when changing from ELSER to E5-like model', function () {
let elserEntriesFromApi: KnowledgeBaseEntry[];
let elserEntriesFromEs: KnowledgeBaseEsEntry[];
let elserInferenceId: string;
let elserWriteIndex: string;
let e5EntriesFromApi: KnowledgeBaseEntry[];
let e5EntriesFromEs: KnowledgeBaseEsEntry[];
let e5InferenceId: string;
let e5WriteIndex: string;
before(async () => {
await importModel(ml, { modelId: TINY_ELSER_MODEL_ID });
await createTinyElserInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID });
await setupKnowledgeBase(observabilityAIAssistantAPIClient, TINY_ELSER_INFERENCE_ID);
await waitForKnowledgeBaseReady(getService);
// ingest documents
await addSampleDocsToInternalKb(getService, animalSampleDocs);
elserEntriesFromApi = (
await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient })
).body.entries;
elserEntriesFromEs = await getKnowledgeBaseEntriesFromEs(es);
elserInferenceId = await getInferenceIdFromWriteIndex({ asInternalUser: es });
elserWriteIndex = await getConcreteWriteIndexFromAlias(es);
// setup KB with E5-like model
await importModel(ml, { modelId: TINY_TEXT_EMBEDDING_MODEL_ID });
await ml.api.startTrainedModelDeploymentES(TINY_TEXT_EMBEDDING_MODEL_ID);
await createTinyTextEmbeddingInferenceEndpoint(getService, {
inferenceId: TINY_TEXT_EMBEDDING_INFERENCE_ID,
});
await setupKnowledgeBase(observabilityAIAssistantAPIClient, TINY_TEXT_EMBEDDING_INFERENCE_ID);
await waitForKnowledgeBaseIndex(getService, '.kibana-observability-ai-assistant-kb-000002');
await waitForKnowledgeBaseReady(getService);
e5EntriesFromApi = (
await getKnowledgeBaseEntriesFromApi({ observabilityAIAssistantAPIClient })
).body.entries;
e5EntriesFromEs = await getKnowledgeBaseEntriesFromEs(es);
e5InferenceId = await getInferenceIdFromWriteIndex({ asInternalUser: es });
e5WriteIndex = await getConcreteWriteIndexFromAlias(es);
});
after(async () => {
// ELSER
await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID });
await deleteInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID });
// E5-like
await deleteModel(getService, { modelId: TINY_TEXT_EMBEDDING_MODEL_ID });
await deleteInferenceEndpoint(getService, { inferenceId: TINY_TEXT_EMBEDDING_INFERENCE_ID });
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
});
describe('when model is ELSER', () => {
it('has correct write index name', async () => {
expect(elserWriteIndex).to.be(`${resourceNames.writeIndexAlias.kb}-000001`);
});
it('has correct number of entries', async () => {
expect(elserEntriesFromApi).to.have.length(5);
expect(elserEntriesFromEs).to.have.length(5);
});
it('has correct ELSER inference id', async () => {
expect(elserInferenceId).to.be(TINY_ELSER_INFERENCE_ID);
});
it('has sparse embeddings', async () => {
const embeddings = getEmbeddings(elserEntriesFromEs);
const hasSparseEmbeddings = embeddings.every((embedding) => {
return (
isObject(embedding) &&
Object.values(embedding).every((value) => typeof value === 'number')
);
});
if (!hasSparseEmbeddings) {
log.warning('Must be sparse embeddings. Found:', JSON.stringify(embeddings, null, 2));
}
expect(hasSparseEmbeddings).to.be(true);
});
});
describe('when model is changed to E5', () => {
it('increments the index name', async () => {
expect(e5WriteIndex).to.be(`${resourceNames.writeIndexAlias.kb}-000002`);
});
it('returns the same entries from the API', async () => {
expect(e5EntriesFromApi).to.eql(elserEntriesFromApi);
});
it('updates the inference id', async () => {
expect(e5InferenceId).to.be(TINY_TEXT_EMBEDDING_INFERENCE_ID);
});
it('has dense embeddings', async () => {
const embeddings = getEmbeddings(e5EntriesFromEs);
// dense embeddings are modelled as arrays of numbers
const hasDenseEmbeddings = embeddings.every((embedding) => {
return isArray(embedding) && embedding.every((value) => typeof value === 'number');
});
if (!hasDenseEmbeddings) {
log.warning('Must be dense embeddings. Found:', JSON.stringify(embeddings, null, 2));
}
expect(hasDenseEmbeddings).to.be(true);
});
});
function getEmbeddings(hits: KnowledgeBaseEsEntry[]) {
return hits.flatMap((hit) => {
return hit._source!._inference_fields.semantic_text.inference.chunks.semantic_text.map(
(chunk) => chunk.embeddings
);
});
}
});
}

View file

@ -1,136 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { orderBy } from 'lodash';
import expect from '@kbn/expect';
import { AI_ASSISTANT_KB_INFERENCE_ID } from '@kbn/observability-ai-assistant-plugin/server/service/inference_endpoint';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
deleteKnowledgeBaseModel,
clearKnowledgeBase,
setupKnowledgeBase,
getKnowledgeBaseEntries,
} from '../utils/knowledge_base';
import { restoreIndexAssets } from '../utils/index_assets';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
const esArchiver = getService('esArchiver');
const es = getService('es');
const retry = getService('retry');
const archive =
'x-pack/test/functional/es_archives/observability/ai_assistant/knowledge_base_8_15';
describe('when the knowledge base index was created before 8.15', function () {
// Intentionally skipped in all serverless environments (local and MKI)
// because the migration scenario being tested is not relevant to MKI and Serverless.
this.tags(['skipServerless']);
before(async () => {
await deleteKnowledgeBaseModel(getService);
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
await clearKnowledgeBase(es);
await esArchiver.load(archive);
await setupKnowledgeBase(getService);
});
after(async () => {
await deleteKnowledgeBaseModel(getService);
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
});
describe('before migrating', () => {
it('the docs do not have semantic_text embeddings', async () => {
const hits = await getKnowledgeBaseEntries(es);
const hasSemanticTextEmbeddings = hits.some((hit) => hit._source?.semantic_text);
expect(hasSemanticTextEmbeddings).to.be(false);
});
});
describe('after migrating', () => {
before(async () => {
const { status } = await observabilityAIAssistantAPIClient.editor({
endpoint:
'POST /internal/observability_ai_assistant/kb/migrations/populate_missing_semantic_text_field',
});
expect(status).to.be(200);
});
it('the docs have semantic_text embeddings', async () => {
await retry.try(async () => {
const hits = await getKnowledgeBaseEntries(es);
const hasSemanticTextEmbeddings = hits.every((hit) => hit._source?.semantic_text);
expect(hasSemanticTextEmbeddings).to.be(true);
expect(
orderBy(hits, '_source.title').map(({ _source }) => {
const text = _source?.semantic_text;
const inference = _source?._inference_fields?.semantic_text?.inference;
return {
text: text ?? '',
inferenceId: inference?.inference_id,
chunkCount: inference?.chunks?.semantic_text?.length,
};
})
).to.eql([
{
text: 'To infinity and beyond!',
inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
chunkCount: 1,
},
{
text: "The user's favourite color is blue.",
inferenceId: AI_ASSISTANT_KB_INFERENCE_ID,
chunkCount: 1,
},
]);
});
});
it('returns entries correctly via API', async () => {
const { status } = await observabilityAIAssistantAPIClient.editor({
endpoint:
'POST /internal/observability_ai_assistant/kb/migrations/populate_missing_semantic_text_field',
});
expect(status).to.be(200);
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/entries',
params: {
query: {
query: '',
sortBy: 'title',
sortDirection: 'asc',
},
},
});
expect(res.status).to.be(200);
expect(
res.body.entries.map(({ title, text, role, type }) => ({ title, text, role, type }))
).to.eql([
{
role: 'user_entry',
title: 'Toy Story quote',
type: 'contextual',
text: 'To infinity and beyond!',
},
{
role: 'assistant_summarization',
title: "User's favourite color",
type: 'contextual',
text: "The user's favourite color is blue.",
},
]);
});
});
});
}

View file

@ -7,15 +7,20 @@
import expect from '@kbn/expect';
import { times } from 'lodash';
import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
deleteKnowledgeBaseModel,
setupKnowledgeBase,
deleteKbIndices,
addSampleDocsToInternalKb,
getConcreteWriteIndexFromAlias,
reIndexKnowledgeBase,
} from '../utils/knowledge_base';
import { createOrUpdateIndexAssets } from '../utils/index_assets';
import { animalSampleDocs } from '../utils/sample_docs';
import {
deployTinyElserAndSetupKb,
teardownTinyElserModelAndInferenceEndpoint,
} from '../utils/model_and_inference';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
@ -29,13 +34,13 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
before(async () => {
await deleteKbIndices(es);
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
await setupKnowledgeBase(getService);
await deployTinyElserAndSetupKb(getService);
});
after(async () => {
await deleteKbIndices(es);
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
});
describe('when running multiple re-index operations in parallel', () => {
@ -47,7 +52,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
before(async () => {
await addSampleDocsToInternalKb(getService, animalSampleDocs);
results = await Promise.all(times(20).map(() => reIndexKnowledgeBase()));
results = await Promise.all(times(20).map(() => _reIndexKnowledgeBase()));
});
it('makes 20 requests to the reindex endpoint', async () => {
@ -59,12 +64,12 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
expect(successResults).to.have.length(1);
});
it('should fail every request but 1', async () => {
it('should fail all requests but 1', async () => {
const failures = results.filter((result) => result.status !== 200);
expect(failures).to.have.length(19);
});
it('throw a LockAcquisitionException for the failing requests', async () => {
it('should throw a LockAcquisitionException for the failing requests', async () => {
const failures = results.filter((result) => result.status === 500);
const errorMessages = failures.every(
(result) =>
@ -75,23 +80,29 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
});
describe('when running multiple re-index operations in sequence', () => {
const iterations = 5;
describe(`when running ${iterations} re-index operations in sequence`, () => {
let results: Array<{ status: number; result: boolean; errorMessage: string | undefined }>;
let initialIndexSequenceNumber: number;
before(async () => {
const writeIndex = await getConcreteWriteIndexFromAlias(es);
// get sequence number from write index
initialIndexSequenceNumber = parseInt(writeIndex.slice(-6), 10);
results = [];
for (const _ of times(20)) {
results.push(await reIndexKnowledgeBase());
for (const _ of times(iterations)) {
results.push(await _reIndexKnowledgeBase());
}
});
it('makes 20 requests', async () => {
expect(results).to.have.length(20);
it(`makes ${iterations} requests`, async () => {
expect(results).to.have.length(iterations);
});
it('every re-index operation succeeds', async () => {
const successResults = results.filter((result) => result.status === 200);
expect(successResults).to.have.length(20);
expect(successResults).to.have.length(iterations);
expect(successResults.every((r) => r.result === true)).to.be(true);
});
@ -99,13 +110,19 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
const failures = results.filter((result) => result.status !== 200);
expect(failures).to.have.length(0);
});
it('should increment the write index sequence number', async () => {
const writeIndex = await getConcreteWriteIndexFromAlias(es);
const sequenceNumber = (iterations + initialIndexSequenceNumber)
.toString()
.padStart(6, '0'); // e.g. 000021
expect(writeIndex).to.be(`${resourceNames.writeIndexAlias.kb}-${sequenceNumber}`);
});
});
});
async function reIndexKnowledgeBase() {
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/kb/reindex',
});
async function _reIndexKnowledgeBase() {
const res = await reIndexKnowledgeBase(observabilityAIAssistantAPIClient);
return {
status: res.status,

View file

@ -1,134 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service';
import AdmZip from 'adm-zip';
import path from 'path';
import { AI_ASSISTANT_SNAPSHOT_REPO_PATH } from '../../../../default_configs/stateful.config.base';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
deleteKbIndices,
deleteKnowledgeBaseModel,
setupKnowledgeBase,
} from '../utils/knowledge_base';
import { createOrUpdateIndexAssets, restoreIndexAssets } from '../utils/index_assets';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
const es = getService('es');
const retry = getService('retry');
const log = getService('log');
describe('when the knowledge base index was created before 8.11', function () {
// Intentionally skipped in all serverless environments (local and MKI)
// because the migration scenario being tested is not relevant to MKI and Serverless.
this.tags(['skipServerless']);
before(async () => {
await unZipKbSnapshot();
await setupKnowledgeBase(getService);
});
beforeEach(async () => {
await restoreKbSnapshot();
await createOrUpdateIndexAssets(observabilityAIAssistantAPIClient);
});
after(async () => {
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
await deleteKnowledgeBaseModel(getService);
});
it('has an index created version earlier than 8.11', async () => {
await retry.try(async () => {
expect(await getKbIndexCreatedVersion()).to.be.lessThan(8110000);
});
});
function createKnowledgeBaseEntry() {
const knowledgeBaseEntry = {
id: 'my-doc-id-1',
title: 'My title',
text: 'My content',
};
return observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/kb/entries/save',
params: { body: knowledgeBaseEntry },
});
}
it('cannot add new entries to KB', async () => {
const { status, body } = await createKnowledgeBaseEntry();
// @ts-expect-error
expect(body.message).to.eql(
'The index ".kibana-observability-ai-assistant-kb" does not support semantic text and must be reindexed. This re-index operation has been scheduled and will be started automatically. Please try again later.'
);
expect(status).to.be(503);
});
it('can add new entries after re-indexing', async () => {
await reIndexKnowledgeBase();
await retry.try(async () => {
const { status } = await createKnowledgeBaseEntry();
expect(status).to.be(200);
});
});
});
async function getKbIndexCreatedVersion() {
const indexSettings = await es.indices.getSettings({
index: resourceNames.concreteIndexName.kb,
});
const { settings } = Object.values(indexSettings)[0];
return parseInt(settings?.index?.version?.created ?? '', 10);
}
async function unZipKbSnapshot() {
const zipFilePath = `${AI_ASSISTANT_SNAPSHOT_REPO_PATH}.zip`;
log.debug(`Unzipping ${zipFilePath} to ${AI_ASSISTANT_SNAPSHOT_REPO_PATH}`);
new AdmZip(zipFilePath).extractAllTo(path.dirname(AI_ASSISTANT_SNAPSHOT_REPO_PATH), true);
}
async function restoreKbSnapshot() {
await deleteKbIndices(es);
log.debug(
`Restoring snapshot of ${resourceNames.concreteIndexName.kb} from ${AI_ASSISTANT_SNAPSHOT_REPO_PATH}`
);
const snapshotRepoName = 'snapshot-repo-8-10';
const snapshotName = 'my_snapshot';
await es.snapshot.createRepository({
name: snapshotRepoName,
repository: {
type: 'fs',
settings: { location: AI_ASSISTANT_SNAPSHOT_REPO_PATH },
},
});
await es.snapshot.restore({
repository: snapshotRepoName,
snapshot: snapshotName,
wait_for_completion: true,
indices: resourceNames.concreteIndexName.kb,
});
await es.snapshot.deleteRepository({ name: snapshotRepoName });
}
async function reIndexKnowledgeBase() {
const { status } = await observabilityAIAssistantAPIClient.editor({
endpoint: 'POST /internal/observability_ai_assistant/kb/reindex',
});
expect(status).to.be(200);
}
}

View file

@ -6,58 +6,166 @@
*/
import expect from '@kbn/expect';
import { resourceNames } from '@kbn/observability-ai-assistant-plugin/server/service';
import { getInferenceIdFromWriteIndex } from '@kbn/observability-ai-assistant-plugin/server/service/knowledge_base_service/get_inference_id_from_write_index';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import { TINY_MODELS, deleteKnowledgeBaseModel, setupKnowledgeBase } from '../utils/knowledge_base';
import { restoreIndexAssets } from '../utils/index_assets';
import { getComponentTemplate, restoreIndexAssets } from '../utils/index_assets';
import {
TINY_ELSER_INFERENCE_ID,
TINY_ELSER_MODEL_ID,
createTinyElserInferenceEndpoint,
deleteInferenceEndpoint,
deployTinyElserAndSetupKb,
importModel,
deleteModel,
teardownTinyElserModelAndInferenceEndpoint,
} from '../utils/model_and_inference';
import {
getConcreteWriteIndexFromAlias,
waitForKnowledgeBaseReady,
setupKnowledgeBase,
} from '../utils/knowledge_base';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const es = getService('es');
const retry = getService('retry');
const ml = getService('ml');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
describe('/internal/observability_ai_assistant/kb/setup', function () {
before(async () => {
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
});
afterEach(async () => {
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
});
it('returns model info when successful', async () => {
const res = await setupKnowledgeBase(getService);
expect(res.body.service_settings.model_id).to.be('pt_tiny_elser');
expect(res.body.inference_id).to.be('obs_ai_assistant_kb_inference');
it('returns 200 when model is deployed', async () => {
const { status } = await deployTinyElserAndSetupKb(getService);
expect(status).to.be(200);
});
it('returns error message if model is not deployed', async () => {
const res = await setupKnowledgeBase(getService, { deployModel: false });
it('returns 200 if model is not deployed', async () => {
const { status } = await setupKbAsAdmin(TINY_ELSER_INFERENCE_ID);
expect(status).to.be(200);
});
expect(res.status).to.be(500);
it('has "pt_tiny_elser_inference_id" as initial inference id', async () => {
const inferenceId = await getInferenceIdFromWriteIndex({ asInternalUser: es });
expect(inferenceId).to.be(TINY_ELSER_INFERENCE_ID);
});
// @ts-expect-error
expect(res.body.message).to.include.string(
'No known trained model with model_id [pt_tiny_elser]'
);
describe('re-indexing', () => {
describe('running setup for a different inference endpoint', () => {
const CUSTOM_TINY_ELSER_INFERENCE_ID = 'custom_tiny_elser_inference_id';
let body: Awaited<ReturnType<typeof setupKbAsAdmin>>['body'];
// @ts-expect-error
expect(res.body.statusCode).to.be(500);
before(async () => {
// setup KB initially
await deployTinyElserAndSetupKb(getService);
// setup KB with custom inference endpoint
await createTinyElserInferenceEndpoint(getService, {
inferenceId: CUSTOM_TINY_ELSER_INFERENCE_ID,
});
const res = await setupKbAsAdmin(CUSTOM_TINY_ELSER_INFERENCE_ID);
body = res.body;
await waitForKnowledgeBaseReady(getService);
});
after(async () => {
await deleteInferenceEndpoint(getService, {
inferenceId: CUSTOM_TINY_ELSER_INFERENCE_ID,
});
});
it('should re-index the KB', async () => {
expect(body.reindex).to.be(true);
expect(body.currentInferenceId).to.be(TINY_ELSER_INFERENCE_ID);
expect(body.nextInferenceId).to.be(CUSTOM_TINY_ELSER_INFERENCE_ID);
await expectWriteIndexName(`${resourceNames.writeIndexAlias.kb}-000002`);
});
});
describe('running setup for the same inference id', () => {
let body: Awaited<ReturnType<typeof setupKbAsAdmin>>['body'];
before(async () => {
await deployTinyElserAndSetupKb(getService);
const res = await setupKbAsAdmin(TINY_ELSER_INFERENCE_ID);
body = res.body;
});
it('does not re-index', async () => {
expect(body.reindex).to.be(false);
expect(body.currentInferenceId).to.be(TINY_ELSER_INFERENCE_ID);
expect(body.nextInferenceId).to.be(TINY_ELSER_INFERENCE_ID);
await expectWriteIndexName(`${resourceNames.writeIndexAlias.kb}-000001`);
});
});
});
describe('when installing a custom inference endpoint', function () {
const customInferenceId = 'my_custom_inference_id';
before(async () => {
await restoreIndexAssets(observabilityAIAssistantAPIClient, es);
await importModel(ml, { modelId: TINY_ELSER_MODEL_ID });
await createTinyElserInferenceEndpoint(getService, {
inferenceId: customInferenceId,
});
await setupKnowledgeBase(observabilityAIAssistantAPIClient, customInferenceId);
await waitForKnowledgeBaseReady(getService);
});
after(async () => {
await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID });
await deleteInferenceEndpoint(getService, { inferenceId: customInferenceId });
});
it('has correct semantic_text mapping in component template', async () => {
const res = await getComponentTemplate(es);
const semanticTextMapping = res.component_template.template.mappings?.properties
?.semantic_text as { inference_id: string };
expect(semanticTextMapping.inference_id).to.be(customInferenceId);
});
});
describe('security roles and access privileges', () => {
it('should deny access for users without the ai_assistant privilege', async () => {
const { status } = await observabilityAIAssistantAPIClient.viewer({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
params: {
query: {
model_id: TINY_MODELS.ELSER,
},
},
});
const { status } = await setupKbAsViewer(TINY_ELSER_INFERENCE_ID);
expect(status).to.be(403);
});
});
});
async function expectWriteIndexName(expectedName: string) {
await retry.try(async () => {
const writeIndex = await getConcreteWriteIndexFromAlias(es);
expect(writeIndex).to.be(expectedName);
});
}
function setupKbAsAdmin(inferenceId: string) {
return observabilityAIAssistantAPIClient.admin({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
params: {
query: { inference_id: inferenceId },
},
});
}
function setupKbAsViewer(inferenceId: string) {
return observabilityAIAssistantAPIClient.viewer({
endpoint: 'POST /internal/observability_ai_assistant/kb/setup',
params: {
query: { inference_id: inferenceId },
},
});
}
}

View file

@ -9,70 +9,96 @@ import expect from '@kbn/expect';
import { KnowledgeBaseState } from '@kbn/observability-ai-assistant-plugin/common';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
deleteKnowledgeBaseModel,
teardownTinyElserModelAndInferenceEndpoint,
deleteInferenceEndpoint,
setupKnowledgeBase,
TINY_MODELS,
} from '../utils/knowledge_base';
deployTinyElserAndSetupKb,
TINY_ELSER_MODEL_ID,
TINY_ELSER_INFERENCE_ID,
deleteModel,
stopTinyElserModel,
} from '../utils/model_and_inference';
export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const es = getService('es');
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
describe('/internal/observability_ai_assistant/kb/status', function () {
// see details: https://github.com/elastic/kibana/issues/219217
this.tags(['failsOnMKI']);
beforeEach(async () => {
await setupKnowledgeBase(getService);
});
afterEach(async () => {
await deleteKnowledgeBaseModel(getService);
});
it('returns correct status after knowledge base is setup', async () => {
it('returns correct status before knowledge base is setup', async () => {
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(res.status).to.be(200);
expect(res.body.kbState).to.be(KnowledgeBaseState.READY);
expect(res.body.kbState).to.be(KnowledgeBaseState.NOT_INSTALLED);
expect(res.body.enabled).to.be(true);
expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_MODELS.ELSER);
});
it('returns correct status after model is deleted', async () => {
await deleteKnowledgeBaseModel(getService, { shouldDeleteInferenceEndpoint: false });
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
describe('after the knowledge base has been set up', () => {
before(async () => {
await deployTinyElserAndSetupKb(getService);
});
expect(res.status).to.be(200);
expect(res.body.kbState).to.not.be(KnowledgeBaseState.READY);
expect(res.body.enabled).to.be(true);
expect(res.body.errorMessage).to.include.string(
'No known trained model with model_id [pt_tiny_elser]'
);
});
it('returns correct status after inference endpoint is deleted', async () => {
await deleteInferenceEndpoint({ es });
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
after(async () => {
await teardownTinyElserModelAndInferenceEndpoint(getService);
});
expect(res.status).to.be(200);
it('returns the correct status when the knowledge base is successfully installed', async () => {
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(res.body.kbState).to.not.be(KnowledgeBaseState.READY);
expect(res.body.enabled).to.be(true);
expect(res.body.errorMessage).to.include.string(
'Inference endpoint not found [obs_ai_assistant_kb_inference]'
);
expect(res.status).to.be(200);
expect(res.body.kbState).to.be(KnowledgeBaseState.READY);
expect(res.body.enabled).to.be(true);
expect(res.body.endpoint?.service_settings?.model_id).to.eql(TINY_ELSER_MODEL_ID);
});
it('returns MODEL_PENDING_DEPLOYMENT status after the model deployment is stopped', async () => {
await stopTinyElserModel(getService);
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(res.status).to.be(200);
expect(res.body.kbState).to.be(KnowledgeBaseState.MODEL_PENDING_DEPLOYMENT);
});
it('returns the correct status after the model is deleted', async () => {
await deleteModel(getService, { modelId: TINY_ELSER_MODEL_ID });
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(res.status).to.be(200);
expect(res.body.kbState).to.be(KnowledgeBaseState.NOT_INSTALLED);
expect(res.body.enabled).to.be(true);
expect(res.body.errorMessage).to.include.string(
'No known trained model with model_id [pt_tiny_elser]'
);
});
it('returns the correct status after inference endpoint is deleted', async () => {
await deleteInferenceEndpoint(getService, { inferenceId: TINY_ELSER_INFERENCE_ID });
const res = await observabilityAIAssistantAPIClient.editor({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(res.status).to.be(200);
expect(res.body.kbState).to.be(KnowledgeBaseState.NOT_INSTALLED);
expect(res.body.enabled).to.be(true);
expect(res.body.errorMessage).to.include.string(
'Inference endpoint not found [pt_tiny_elser_inference_id]'
);
});
});
describe('security roles and access privileges', () => {
@ -80,6 +106,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
const { status } = await observabilityAIAssistantAPIClient.viewer({
endpoint: 'GET /internal/observability_ai_assistant/kb/status',
});
expect(status).to.be(403);
});
});

@ -12,16 +12,16 @@ import { CONTEXT_FUNCTION_NAME } from '@kbn/observability-ai-assistant-plugin/se
import { Instruction } from '@kbn/observability-ai-assistant-plugin/common/types';
import pRetry from 'p-retry';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
clearKnowledgeBase,
deleteKnowledgeBaseModel,
setupKnowledgeBase,
} from '../utils/knowledge_base';
import { clearKnowledgeBase } from '../utils/knowledge_base';
import {
LlmProxy,
createLlmProxy,
} from '../../../../../../observability_ai_assistant_api_integration/common/create_llm_proxy';
import { clearConversations, getConversationCreatedEvent } from '../utils/conversation';
import {
deployTinyElserAndSetupKb,
teardownTinyElserModelAndInferenceEndpoint,
} from '../utils/model_and_inference';
const sortById = (data: Array<Instruction & { public?: boolean }>) => sortBy(data, 'id');
@ -33,11 +33,11 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
describe('Knowledge base user instructions', function () {
before(async () => {
await setupKnowledgeBase(getService);
await deployTinyElserAndSetupKb(getService);
});
after(async () => {
await deleteKnowledgeBaseModel(getService);
await teardownTinyElserModelAndInferenceEndpoint(getService);
await clearKnowledgeBase(es);
await clearConversations(es);
});
@ -274,7 +274,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
expect(status).to.be(200);
void proxy.interceptTitle('This is a conversation title');
void proxy.interceptConversation('I, the LLM, hear you!');
void proxy.interceptWithResponse('I, the LLM, hear you!');
const messages: Message[] = [
{
@ -440,7 +440,7 @@ export default function ApiTest({ getService }: DeploymentAgnosticFtrProviderCon
});
it('includes private KB instructions in the system message sent to the LLM', async () => {
const simulatorPromise = proxy.interceptConversation('Hello from LLM Proxy');
const simulatorPromise = proxy.interceptWithResponse('Hello from LLM Proxy');
const messages: Message[] = [
{
'@timestamp': new Date().toISOString(),

@ -0,0 +1,62 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import expect from '@kbn/expect';
import type { DeploymentAgnosticFtrProviderContext } from '../../../../ftr_provider_context';
import {
TINY_ELSER_INFERENCE_ID,
teardownTinyElserModelAndInferenceEndpoint,
setupTinyElserModelAndInferenceEndpoint,
} from '../utils/model_and_inference';
export default function WarmupModelApiTest({ getService }: DeploymentAgnosticFtrProviderContext) {
const observabilityAIAssistantAPIClient = getService('observabilityAIAssistantApi');
function warmupKbAsAdmin(inferenceId: string) {
return observabilityAIAssistantAPIClient.admin({
endpoint: 'POST /internal/observability_ai_assistant/kb/warmup_model',
params: {
query: {
inference_id: inferenceId,
},
},
});
}
function warmupKbAsViewer(inferenceId: string) {
return observabilityAIAssistantAPIClient.viewer({
endpoint: 'POST /internal/observability_ai_assistant/kb/warmup_model',
params: {
query: {
inference_id: inferenceId,
},
},
});
}
describe('/internal/observability_ai_assistant/kb/warmup_model', function () {
const inferenceId = TINY_ELSER_INFERENCE_ID;
before(async () => {
await setupTinyElserModelAndInferenceEndpoint(getService);
});
after(async () => {
await teardownTinyElserModelAndInferenceEndpoint(getService);
});
it('returns 200 and triggers model warmup', async () => {
const response = await warmupKbAsAdmin(inferenceId);
expect(response.status).to.be(200);
});
it('should deny access for users without the ai_assistant privilege', async () => {
const response = await warmupKbAsViewer(inferenceId);
expect(response.status).to.be(403);
});
});
}

@ -0,0 +1,33 @@
{
"min_version": "7.12.0",
"uuid": "Z32sceNySTyUXskXH4H93g",
"cluster_id": "pN2COvnNRlioIBagZPodZA",
"snapshots": [
{
"name": "my_snapshot",
"uuid": "9Wej6Q4gRZWSblPzNrtLOQ",
"state": 1,
"index_metadata_lookup": {
"oYscBShISaWpDm7n1KXkCA": "X0NlkruTSgCxTsBO_mA37g-_na_-3-1-1"
},
"version": 8100599,
"start_time_millis": 1739275733654,
"end_time_millis": 1739275734671,
"slm_policy": ""
}
],
"indices": {
".kibana-observability-ai-assistant-kb-000001": {
"id": "oYscBShISaWpDm7n1KXkCA",
"snapshots": [
"9Wej6Q4gRZWSblPzNrtLOQ"
],
"shard_generations": [
"KxGoXW9rQlmlCfPaW8lMYA"
]
}
},
"index_metadata_identifiers": {
"X0NlkruTSgCxTsBO_mA37g-_na_-3-1-1": "3cLp9JQBDFLNeO2pjufJ"
}
}

@ -0,0 +1 @@
{"min_version":"7.12.0","uuid":"bPmLsBiwRb2PstKIPFVa9g","cluster_id":"RiFYfwi9SL-ADRkzOQBk6w","snapshots":[{"name":"kb_snapshot_8.16","uuid":"y1zcTBUTQFuKFwMMbi7VvQ","state":1,"index_metadata_lookup":{"2MSR7bXrSyyoo2ZmErlBkg":"AwNXKEXNQO6os9NaVvvuQg-_na_-2-2-1"},"version":"8.11.0","index_version":8518000,"start_time_millis":1746016036173,"end_time_millis":1746016036580,"slm_policy":""}],"indices":{".kibana-observability-ai-assistant-kb-000001":{"id":"2MSR7bXrSyyoo2ZmErlBkg","snapshots":["y1zcTBUTQFuKFwMMbi7VvQ"],"shard_generations":["FWDT04_LTIaFE_BxwPeJfA"]}},"index_metadata_identifiers":{"AwNXKEXNQO6os9NaVvvuQg-_na_-2-2-1":"ZOmqhpYBNSJu_urmZ8Mp"}}

Some files were not shown because too many files have changed in this diff.