Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 09:48:58 -04:00)
# Backport

This will backport the following commits from `main` to `8.x`:

- [Pass system message to inferenceCliente.chatComplete (#211263)](https://github.com/elastic/kibana/pull/211263)

### Questions?

Please refer to the [Backport tool documentation](https://github.com/sqren/backport).

## Summary

Closes #211257

Regression introduced in 8.18 (https://github.com/elastic/kibana/pull/199286): we no longer pass the `system` message to the inference plugin, and thereby to the LLM, so only user messages reach the LLM. The system message is important for steering the conversation and for providing guardrails to the LLM.

Co-authored-by: Arturo Lidueña <arturo.liduena@elastic.co>
Commit 04039644e9 (parent c2797a3208)

2 changed files with 5 additions and 0 deletions
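Before the diffs, a minimal sketch of what the fix changes at the call level. The simplified `ChatCompleteOptions` interface and bare `chatComplete()` function below are illustrative assumptions, not the inference plugin's real API; the `connectorId` and user message mirror the test fixtures further down, and the system-prompt text is made up.

```ts
// Minimal sketch only. The real inference plugin exposes a richer API; this
// simplified ChatCompleteOptions type and bare chatComplete() are assumptions
// made for illustration.
interface ChatCompleteOptions {
  connectorId: string;
  system?: string; // steering instructions / guardrails for the LLM
  messages: Array<{ role: 'user' | 'assistant'; content: string }>;
}

declare function chatComplete(options: ChatCompleteOptions): Promise<unknown>;

async function example() {
  // Regression (since https://github.com/elastic/kibana/pull/199286): the stored
  // system message was dropped, so only user/assistant messages reached the LLM.
  await chatComplete({
    connectorId: 'foo',
    messages: [{ role: 'user', content: 'How many alerts do I have?' }],
  });

  // Fix: forward the stored system message explicitly via the `system` option.
  await chatComplete({
    connectorId: 'foo',
    system: 'You are the Observability AI Assistant ...', // illustrative system prompt
    messages: [{ role: 'user', content: 'How many alerts do I have?' }],
  });
}
```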
Changes to the Observability AI Assistant client tests (the same `system` expectation is added at three call sites):

@@ -346,6 +346,7 @@ describe('Observability AI Assistant client', () => {
         {
           connectorId: 'foo',
           stream: true,
+          system: EXPECTED_STORED_SYSTEM_MESSAGE,
           messages: expect.arrayContaining([
             { role: 'user', content: 'How many alerts do I have?' },
           ]),
@@ -916,6 +917,7 @@ describe('Observability AI Assistant client', () => {
         {
           connectorId: 'foo',
           stream: true,
+          system: EXPECTED_STORED_SYSTEM_MESSAGE,
           messages: expect.arrayContaining([
             { role: 'user', content: 'How many alerts do I have?' },
           ]),
@@ -1077,6 +1079,7 @@ describe('Observability AI Assistant client', () => {
         {
           connectorId: 'foo',
           stream: true,
+          system: EXPECTED_STORED_SYSTEM_MESSAGE,
           messages: expect.arrayContaining([
             { role: 'user', content: 'How many alerts do I have?' },
           ]),
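All three test hunks add the same field. A hypothetical reconstruction of the assertion those expected-options objects sit inside is sketched below; only the object fields come from the diff, while the `@jest/globals` import, the `inferenceClientMock` stand-in, the use of `expect.objectContaining`, and the `EXPECTED_STORED_SYSTEM_MESSAGE` value are assumptions.

```ts
import { expect, jest } from '@jest/globals';

// Stand-ins: in the real suite the mock and the expected system message come
// from the test setup; the values here are illustrative only.
const inferenceClientMock = { chatComplete: jest.fn() };
const EXPECTED_STORED_SYSTEM_MESSAGE = 'You are the Observability AI Assistant ...';

// The overall shape of the assertion is assumed; the fields inside the
// expected options object are the ones shown in the hunks above.
expect(inferenceClientMock.chatComplete).toHaveBeenCalledWith(
  expect.objectContaining({
    connectorId: 'foo',
    stream: true,
    system: EXPECTED_STORED_SYSTEM_MESSAGE,
    messages: expect.arrayContaining([
      { role: 'user', content: 'How many alerts do I have?' },
    ]),
  })
);
```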
Changes to ObservabilityAIAssistantClient (the stored system message is split out and passed via the `system` option):

@@ -508,6 +508,8 @@ export class ObservabilityAIAssistantClient {
 
       const options = {
         connectorId,
+        system: messages.find((message) => message.message.role === MessageRole.System)?.message
+          .content,
         messages: convertMessagesForInference(
           messages.filter((message) => message.message.role !== MessageRole.System)
         ),
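The two added lines rely on optional chaining, so a conversation without a stored system message simply yields `undefined` for `system`. A standalone sketch of that split is shown below, using simplified stand-in types inferred from the hunk rather than the real Kibana definitions of `MessageRole` and the message wrapper.

```ts
// Simplified stand-ins for the real Kibana types; shapes are inferred from the
// hunk above rather than copied from the source tree.
enum MessageRole {
  System = 'system',
  User = 'user',
  Assistant = 'assistant',
}

interface StoredMessage {
  message: { role: MessageRole; content?: string };
}

// Mirrors the added lines: pull the stored system message (if any) out of the
// conversation, and keep the rest for the `messages` option.
function splitSystemMessage(storedMessages: StoredMessage[]) {
  const system = storedMessages.find((m) => m.message.role === MessageRole.System)?.message
    .content;
  const nonSystemMessages = storedMessages.filter(
    (m) => m.message.role !== MessageRole.System
  );
  return { system, nonSystemMessages }; // `system` is undefined when none was stored
}
```

Keeping the system message out of the `messages` array and passing it through the dedicated `system` option is how the commit restores the steering and guardrail behavior described in the summary above.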