[ML] Support adding inference_id in a flyout (#180330)
## Summary
* Creates a new package, `@kbn/inference_integration_flyout`.
* The package implements a flyout for adding a new inference endpoint id, covering Elastic models, third-party integrations, and instructions for uploading a model via the Eland Python client.
* The package is used to support the semantic_text feature and is accessed via the `add a new inference endpoint` button; an illustrative sketch of how such an endpoint id is referenced from a semantic_text mapping follows below.
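For context only (not part of this PR's changes), the sketch below shows how an inference endpoint id created through the flyout could be referenced from a `semantic_text` mapping. The index name, field name, and endpoint id are illustrative assumptions, and older `@elastic/elasticsearch` typings may not yet include the `semantic_text` mapping type.

```typescript
import { Client } from '@elastic/elasticsearch';

// Sketch: create an index whose `content` field delegates embedding generation
// to the inference endpoint referenced by `inference_id`. All names below are
// example values, not values from this PR.
export async function createSemanticIndex(client: Client) {
  await client.indices.create({
    index: 'my-semantic-index',
    mappings: {
      properties: {
        content: {
          type: 'semantic_text',
          inference_id: 'my-elser-endpoint',
        },
      },
    },
  });
}
```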
## Screen Recording
dbd36634-bd4a-49f1-b1d5-d7b6c90444bc
### Checklist
- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [ ] [Flaky Test
Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was
used on any tests changed
---------
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
parent 9e797cbc1e
commit 439f4a8876
54 changed files with 2122 additions and 362 deletions
```diff
@@ -9,7 +9,11 @@ import Boom from '@hapi/boom';
 import type { IScopedClusterClient } from '@kbn/core/server';
 import { JOB_MAP_NODE_TYPES, type MapElements } from '@kbn/ml-data-frame-analytics-utils';
 import { flatten } from 'lodash';
-import type { TransformGetTransformTransformSummary } from '@elastic/elasticsearch/lib/api/types';
+import type {
+  InferenceModelConfig,
+  InferenceTaskType,
+  TransformGetTransformTransformSummary,
+} from '@elastic/elasticsearch/lib/api/types';
 import type { IndexName, IndicesIndexState } from '@elastic/elasticsearch/lib/api/types';
 import type {
   IngestPipeline,
@@ -581,4 +585,21 @@ export class ModelsProvider {
     await mlSavedObjectService.updateTrainedModelsSpaces([modelId], ['*'], []);
     return putResponse;
   }
+  /**
+   * Puts the requested Inference endpoint id into elasticsearch, triggering elasticsearch to create the inference endpoint id
+   * @param inferenceId - Inference Endpoint Id
+   * @param taskType - Inference Task type. Either sparse_embedding or text_embedding
+   * @param modelConfig - Model configuration based on service type
+   */
+  async createInferenceEndpoint(
+    inferenceId: string,
+    taskType: InferenceTaskType,
+    modelConfig: InferenceModelConfig
+  ) {
+    return await this._client.asCurrentUser.inference.putModel({
+      inference_id: inferenceId,
+      task_type: taskType,
+      model_config: modelConfig,
+    });
+  }
 }
```
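For illustration only (not part of the diff above), here is a minimal sketch of how the new `createInferenceEndpoint` method might be invoked for an ELSER-backed `sparse_embedding` endpoint. The endpoint id, the ELSER service settings, the import path, and the `createElserEndpoint` helper are all illustrative assumptions, not values from this PR.

```typescript
import type {
  InferenceModelConfig,
  InferenceTaskType,
} from '@elastic/elasticsearch/lib/api/types';
// Illustrative relative import; adjust to the actual location of ModelsProvider.
import type { ModelsProvider } from './models_provider';

// Sketch: create a sparse_embedding endpoint backed by the ELSER service.
// The endpoint id and service settings are example values only.
export async function createElserEndpoint(modelsProvider: ModelsProvider) {
  const inferenceId = 'my-elser-endpoint';
  const taskType: InferenceTaskType = 'sparse_embedding';
  const modelConfig: InferenceModelConfig = {
    service: 'elser',
    service_settings: { num_allocations: 1, num_threads: 1 },
  };

  // Delegates to the Elasticsearch inference API call (inference.putModel) shown in the diff.
  return modelsProvider.createInferenceEndpoint(inferenceId, taskType, modelConfig);
}
```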