Update dependency @elastic/elasticsearch to ^8.15.0 (main) (#190378)

Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com>
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Alejandro Fernández Haro <alejandro.haro@elastic.co>
Co-authored-by: Walter Rafelsberger <walter.rafelsberger@elastic.co>
This commit is contained in:
elastic-renovate-prod[bot] 2024-08-23 12:48:10 -05:00 committed by GitHub
parent 745ecfdd30
commit 68a924411b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
27 changed files with 145 additions and 145 deletions

View file

@@ -10,7 +10,7 @@ import type { IScopedClusterClient } from '@kbn/core/server';
import { JOB_MAP_NODE_TYPES, type MapElements } from '@kbn/ml-data-frame-analytics-utils';
import { flatten } from 'lodash';
import type {
InferenceModelConfig,
InferenceInferenceEndpoint,
InferenceTaskType,
TasksTaskInfo,
TransformGetTransformTransformSummary,
@@ -591,19 +591,19 @@ export class ModelsProvider {
* Puts the requested Inference endpoint id into elasticsearch, triggering elasticsearch to create the inference endpoint id
* @param inferenceId - Inference Endpoint Id
* @param taskType - Inference Task type. Either sparse_embedding or text_embedding
* @param modelConfig - Model configuration based on service type
* @param inferenceConfig - Model configuration based on service type
*/
async createInferenceEndpoint(
inferenceId: string,
taskType: InferenceTaskType,
modelConfig: InferenceModelConfig
inferenceConfig: InferenceInferenceEndpoint
) {
try {
const result = await this._client.asCurrentUser.inference.putModel(
const result = await this._client.asCurrentUser.inference.put(
{
inference_id: inferenceId,
task_type: taskType,
model_config: modelConfig,
inference_config: inferenceConfig,
},
{ maxRetries: 0 }
);
@@ -613,7 +613,7 @@ export class ModelsProvider {
// Erroring out is misleading in these cases, so we return the model_id and task_type
if (error.name === 'TimeoutError') {
return {
model_id: modelConfig.service,
model_id: inferenceConfig.service,
task_type: taskType,
};
} else {