[ML] Trained models: Adds workflow for creating ingest pipeline for a trained model (#170902)

## Summary

Related issue: https://github.com/elastic/kibana/issues/168988

This PR adds the ability to create an ingest pipeline using a trained
model for inference.

From within the 'Test model' flyout, this PR adds a `Create pipeline` button that
opens another 'Create pipeline' flyout (similar to the DFA models' deploy
model flyout).

This flyout uses the configuration that was used for testing the model.

<img width="1380" alt="image"
src="cc89aa56-13d3-47df-9a7b-4b68bc010aa2">


<img width="1374" alt="image"
src="fc2c8acd-db46-4f9b-a1af-a3000f6ae90c">


<img width="1379" alt="image"
src="7e67bc87-0bbc-4345-b2d2-a17f0240f7ba">


<img width="1407" alt="image"
src="15952378-c0c7-44d7-93ca-b4051b530186">


<img width="1371" alt="image"
src="ce0f8f38-77c5-4d1e-b55c-cc9c9a693220">

<img width="1381" alt="image"
src="cd239209-a61e-4496-b883-62d298fd5ace">



### Checklist

Delete any items that are not applicable to this PR.

- [ ] Any text added follows [EUI's writing
guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses
sentence case text and includes [i18n
support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md)
- [ ]
[Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html)
was added for features that require explanation or tutorials
- [ ] [Unit or functional
tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html)
were updated or added to match the most common scenarios
- [ ] Any UI touched in this PR is usable by keyboard only (learn more
about [keyboard accessibility](https://webaim.org/techniques/keyboard/))
- [ ] Any UI touched in this PR does not create any new axe failures
(run axe in browser:
[FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/),
[Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US))
- [ ] If a plugin configuration key changed, check if it needs to be
allowlisted in the cloud and added to the [docker
list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker)
- [ ] This renders correctly on smaller devices using a responsive
layout. (You can test this [in your
browser](https://www.browserstack.com/guide/responsive-testing-on-local-server))
- [ ] This was checked for [cross-browser
compatibility](https://www.elastic.co/support/matrix#matrix_browsers)

---------

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
Melissa Alvarez 2023-12-19 08:45:52 -07:00 committed by GitHub
parent 99763dc616
commit 4fc4dfbbda
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
35 changed files with 1592 additions and 387 deletions

View file

@ -671,6 +671,7 @@ export const getDocLinks = ({ kibanaBranch, buildFlavor }: GetDocLinkOptions): D
cronExpressions: `${ELASTICSEARCH_DOCS}cron-expressions.html`,
executeWatchActionModes: `${ELASTICSEARCH_DOCS}watcher-api-execute-watch.html#watcher-api-execute-watch-action-mode`,
indexExists: `${ELASTICSEARCH_DOCS}indices-exists.html`,
inferTrainedModel: `${ELASTICSEARCH_DOCS}infer-trained-model.html`,
multiSearch: `${ELASTICSEARCH_DOCS}search-multi-search.html`,
openIndex: `${ELASTICSEARCH_DOCS}indices-open-close.html`,
putComponentTemplate: `${ELASTICSEARCH_DOCS}indices-component-template.html`,

View file

@ -391,6 +391,7 @@ export interface DocLinks {
cronExpressions: string;
executeWatchActionModes: string;
indexExists: string;
inferTrainedModel: string;
multiSearch: string;
openIndex: string;
putComponentTemplate: string;

View file

@ -22,14 +22,14 @@ import { extractErrorProperties } from '@kbn/ml-error-utils';
import { ModelItem } from '../../model_management/models_list';
import type { AddInferencePipelineSteps } from './types';
import { ADD_INFERENCE_PIPELINE_STEPS } from './constants';
import { AddInferencePipelineFooter } from './components/add_inference_pipeline_footer';
import { AddInferencePipelineHorizontalSteps } from './components/add_inference_pipeline_horizontal_steps';
import { AddInferencePipelineFooter } from '../shared';
import { AddInferencePipelineHorizontalSteps } from '../shared';
import { getInitialState, getModelType } from './state';
import { PipelineDetails } from './components/pipeline_details';
import { ProcessorConfiguration } from './components/processor_configuration';
import { OnFailureConfiguration } from './components/on_failure_configuration';
import { OnFailureConfiguration } from '../shared';
import { TestPipeline } from './components/test_pipeline';
import { ReviewAndCreatePipeline } from './components/review_and_create_pipeline';
import { ReviewAndCreatePipeline } from '../shared';
import { useMlApiContext } from '../../contexts/kibana';
import { getPipelineConfig } from './get_pipeline_config';
import { validateInferencePipelineConfigurationStep } from './validation';
@ -122,6 +122,8 @@ export const AddInferencePipelineFlyout: FC<AddInferencePipelineFlyoutProps> = (
setStep={setStep}
isDetailsStepValid={pipelineNameError === undefined && targetFieldError === undefined}
isConfigureProcessorStepValid={hasUnsavedChanges === false}
hasProcessorStep
pipelineCreated={formState.pipelineCreated}
/>
<EuiSpacer size="m" />
{step === ADD_INFERENCE_PIPELINE_STEPS.DETAILS && (
@ -184,6 +186,7 @@ export const AddInferencePipelineFlyout: FC<AddInferencePipelineFlyoutProps> = (
isConfigureProcessorStepValid={hasUnsavedChanges === false}
pipelineCreated={formState.pipelineCreated}
creatingPipeline={formState.creatingPipeline}
hasProcessorStep
/>
</EuiFlyoutFooter>
</EuiFlyout>

View file

@ -14,18 +14,14 @@ import {
EuiFlexItem,
EuiForm,
EuiFormRow,
EuiLink,
EuiSpacer,
EuiTitle,
EuiText,
EuiTextArea,
EuiPanel,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import { useMlKibana } from '../../../contexts/kibana';
import type { MlInferenceState } from '../types';
import { PipelineDetailsTitle } from '../../shared';
import { PipelineNameAndDescription } from '../../shared';
interface Props {
handlePipelineConfigUpdate: (configUpdate: Partial<MlInferenceState>) => void;
@ -47,12 +43,6 @@ export const PipelineDetails: FC<Props> = memo(
targetField,
targetFieldError,
}) => {
const {
services: {
docLinks: { links },
},
} = useMlKibana();
const handleConfigChange = (value: string, type: string) => {
handlePipelineConfigUpdate({ [type]: value });
};
@ -60,133 +50,18 @@ export const PipelineDetails: FC<Props> = memo(
return (
<EuiFlexGroup>
<EuiFlexItem grow={3}>
<EuiTitle size="s">
<h4>
{i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.title',
{ defaultMessage: 'Create a pipeline' }
)}
</h4>
</EuiTitle>
<EuiSpacer size="m" />
<EuiText color="subdued" size="s">
<p>
<FormattedMessage
id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.description"
defaultMessage="Build a {pipeline} to use the trained data frame analytics model - {modelId} - for inference."
values={{
modelId: <EuiCode>{modelId}</EuiCode>,
pipeline: (
<EuiLink external target="_blank" href={links.ingest.pipelines}>
pipeline
</EuiLink>
),
}}
/>
</p>
<p>
<FormattedMessage
id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.descriptionUsePipelines"
defaultMessage="Use {pipelineSimulateLink} or {reindexLink} to pass data into this pipeline. Predictions are stored in the Target field."
values={{
reindexLink: (
<EuiLink
external
target="_blank"
href={links.upgradeAssistant.reindexWithPipeline}
>
_reindex API
</EuiLink>
),
pipelineSimulateLink: (
<EuiLink external target="_blank" href={links.apis.simulatePipeline}>
pipeline/_simulate
</EuiLink>
),
}}
/>
</p>
</EuiText>
<PipelineDetailsTitle modelId={modelId} />
</EuiFlexItem>
<EuiFlexItem grow={7}>
<EuiPanel hasBorder={false} hasShadow={false}>
{/* NAME */}
<EuiForm component="form">
<EuiFormRow
fullWidth
label={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.nameLabel',
{
defaultMessage: 'Name',
}
)}
helpText={
!pipelineNameError && (
<EuiText size="xs">
{i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.name.helpText',
{
defaultMessage:
'Pipeline names are unique within a deployment and can only contain letters, numbers, underscores, and hyphens.',
}
)}
</EuiText>
)
}
error={pipelineNameError}
isInvalid={pipelineNameError !== undefined}
>
<EuiFieldText
data-test-subj="mlTrainedModelsInferencePipelineNameInput"
fullWidth
placeholder={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.namePlaceholder',
{
defaultMessage: 'Enter a unique name for this pipeline',
}
)}
value={pipelineName}
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
handleConfigChange(e.target.value, 'pipelineName')
}
/>
</EuiFormRow>
{/* DESCRIPTION */}
<EuiFormRow
fullWidth
label={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.descriptionLabel',
{
defaultMessage: 'Description',
}
)}
helpText={
<EuiText size="xs">
{i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.description.helpText',
{
defaultMessage: 'A description of what this pipeline does.',
}
)}
</EuiText>
}
>
<EuiTextArea
compressed
fullWidth
data-test-subj="mlTrainedModelsInferencePipelineDescriptionInput"
placeholder={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.descriptionPlaceholder',
{
defaultMessage: 'Add a description of what this pipeline does.',
}
)}
value={pipelineDescription}
onChange={(e: React.ChangeEvent<HTMLTextAreaElement>) =>
handleConfigChange(e.target.value, 'pipelineDescription')
}
/>
</EuiFormRow>
{/* NAME and DESCRIPTION */}
<PipelineNameAndDescription
pipelineName={pipelineName}
pipelineDescription={pipelineDescription}
pipelineNameError={pipelineNameError}
handlePipelineConfigUpdate={handlePipelineConfigUpdate}
/>
{/* TARGET FIELD */}
<EuiFormRow
fullWidth

View file

@ -57,13 +57,13 @@ const destIndexInvalid = i18n.translate(
interface Props {
pipelineName: string;
sourceIndex: string;
sourceIndex?: string;
}
export const ReindexWithPipeline: FC<Props> = ({ pipelineName, sourceIndex }) => {
const [selectedIndex, setSelectedIndex] = useState<EuiComboBoxOptionOption[]>([
{ label: sourceIndex },
]);
const [selectedIndex, setSelectedIndex] = useState<EuiComboBoxOptionOption[]>(
sourceIndex ? [{ label: sourceIndex }] : []
);
const [options, setOptions] = useState<EuiComboBoxOptionOption[]>([]);
const [destinationIndex, setDestinationIndex] = useState<string>('');
const [destinationIndexExists, setDestinationIndexExists] = useState<boolean>(false);
@ -205,7 +205,7 @@ export const ReindexWithPipeline: FC<Props> = ({ pipelineName, sourceIndex }) =>
setCanReindexError(errorMessage);
}
}
if (hasPrivileges !== undefined) {
if (hasPrivileges !== undefined && selectedIndex.length) {
checkPrivileges();
}
},
@ -264,6 +264,7 @@ export const ReindexWithPipeline: FC<Props> = ({ pipelineName, sourceIndex }) =>
<EuiButton
onClick={onReindex}
disabled={
selectedIndex.length === 0 ||
(destinationIndexInvalidMessage !== undefined && selectedIndex.length > 0) ||
!canReindex ||
destinationIndexExists
@ -395,7 +396,7 @@ export const ReindexWithPipeline: FC<Props> = ({ pipelineName, sourceIndex }) =>
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.reindexStartedMessage',
{
defaultMessage: 'Reindexing of {sourceIndex} to {destinationIndex} has started.',
values: { sourceIndex, destinationIndex },
values: { sourceIndex: selectedIndex[0].label, destinationIndex },
}
)}
color="success"

View file

@ -11,7 +11,8 @@ import { ADD_INFERENCE_PIPELINE_STEPS } from './constants';
export function getSteps(
step: AddInferencePipelineSteps,
isConfigureStepValid: boolean,
isPipelineDataValid: boolean
isPipelineDataValid: boolean,
hasProcessorStep: boolean
) {
let nextStep: AddInferencePipelineSteps | undefined;
let previousStep: AddInferencePipelineSteps | undefined;
@ -19,7 +20,9 @@ export function getSteps(
switch (step) {
case ADD_INFERENCE_PIPELINE_STEPS.DETAILS:
nextStep = ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR;
nextStep = hasProcessorStep
? ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR
: ADD_INFERENCE_PIPELINE_STEPS.ON_FAILURE;
isContinueButtonEnabled = isConfigureStepValid;
break;
case ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR:
@ -29,7 +32,9 @@ export function getSteps(
break;
case ADD_INFERENCE_PIPELINE_STEPS.ON_FAILURE:
nextStep = ADD_INFERENCE_PIPELINE_STEPS.TEST;
previousStep = ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR;
previousStep = hasProcessorStep
? ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR
: ADD_INFERENCE_PIPELINE_STEPS.DETAILS;
isContinueButtonEnabled = isPipelineDataValid;
break;
case ADD_INFERENCE_PIPELINE_STEPS.TEST:

View file

@ -5,8 +5,10 @@
* 2.0.
*/
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { i18n } from '@kbn/i18n';
import { IngestInferenceProcessor } from '@elastic/elasticsearch/lib/api/types';
import type { SupportedPytorchTasksType } from '@kbn/ml-trained-models-utils';
import { InferenceModelTypes } from './types';
import type { AddInferencePipelineFormErrors } from './types';
@ -46,6 +48,18 @@ const INFERENCE_CONFIG_MODEL_TYPE_ERROR = i18n.translate(
defaultMessage: 'Inference configuration inference type must match model type.',
}
);
const PROCESSOR_REQUIRED = i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.processorRequiredError',
{
defaultMessage: 'At least one processor is required to create the pipeline.',
}
);
const INFERENCE_PROCESSOR_REQUIRED = i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.inferenceProcessorRequiredError',
{
defaultMessage: "An inference processor specifying 'model_id' is required.",
}
);
const VALID_PIPELINE_NAME_REGEX = /^[\w\-]+$/;
export const isValidPipelineName = (input: string): boolean => {
@ -75,7 +89,7 @@ export const validateInferencePipelineConfigurationStep = (
export const validateInferenceConfig = (
inferenceConfig: IngestInferenceProcessor['inference_config'],
modelType?: InferenceModelTypes
modelType?: InferenceModelTypes | SupportedPytorchTasksType
) => {
const inferenceConfigKeys = Object.keys(inferenceConfig ?? {});
let error;
@ -116,3 +130,31 @@ export const validateFieldMap = (
return error;
};
export const validatePipelineProcessors = (
pipelineProcessors: estypes.IngestPipeline,
taskType?: SupportedPytorchTasksType
) => {
const { processors } = pipelineProcessors;
let error;
// Must have at least one processor
if (!Array.isArray(processors) || (Array.isArray(processors) && processors.length < 1)) {
error = PROCESSOR_REQUIRED;
}
const inferenceProcessor = processors?.find(
(processor) => processor.inference && processor.inference.model_id
);
if (inferenceProcessor === undefined) {
error = INFERENCE_PROCESSOR_REQUIRED;
} else {
// If populated, inference config must have the correct model type
const inferenceConfig = inferenceProcessor.inference?.inference_config;
if (taskType && inferenceConfig) {
error = validateInferenceConfig(inferenceConfig, taskType);
}
}
return error;
};

View file

@ -9,14 +9,14 @@ import React, { FC, useMemo } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiButton, EuiButtonEmpty, EuiFlexGroup, EuiFlexItem } from '@elastic/eui';
import { AddInferencePipelineSteps } from '../types';
import type { AddInferencePipelineSteps } from '../ml_inference/types';
import {
BACK_BUTTON_LABEL,
CANCEL_BUTTON_LABEL,
CLOSE_BUTTON_LABEL,
CONTINUE_BUTTON_LABEL,
} from '../constants';
import { getSteps } from '../get_steps';
} from '../ml_inference/constants';
import { getSteps } from '../ml_inference/get_steps';
interface Props {
isDetailsStepValid: boolean;
@ -26,7 +26,8 @@ interface Props {
step: AddInferencePipelineSteps;
onClose: () => void;
onCreate: () => void;
setStep: React.Dispatch<React.SetStateAction<AddInferencePipelineSteps>>;
setStep: (step: AddInferencePipelineSteps) => void;
hasProcessorStep: boolean;
}
export const AddInferencePipelineFooter: FC<Props> = ({
@ -38,10 +39,11 @@ export const AddInferencePipelineFooter: FC<Props> = ({
onCreate,
step,
setStep,
hasProcessorStep,
}) => {
const { nextStep, previousStep, isContinueButtonEnabled } = useMemo(
() => getSteps(step, isDetailsStepValid, isConfigureProcessorStepValid),
[isDetailsStepValid, isConfigureProcessorStepValid, step]
() => getSteps(step, isDetailsStepValid, isConfigureProcessorStepValid, hasProcessorStep),
[isDetailsStepValid, isConfigureProcessorStepValid, step, hasProcessorStep]
);
return (

View file

@ -8,27 +8,44 @@
import React, { FC, memo } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiStepsHorizontal, EuiStepsHorizontalProps } from '@elastic/eui';
import type { AddInferencePipelineSteps } from '../types';
import { ADD_INFERENCE_PIPELINE_STEPS } from '../constants';
import { EuiStepsHorizontal, type EuiStepsHorizontalProps } from '@elastic/eui';
import type { AddInferencePipelineSteps } from '../ml_inference/types';
import { ADD_INFERENCE_PIPELINE_STEPS } from '../ml_inference/constants';
const steps = Object.values(ADD_INFERENCE_PIPELINE_STEPS);
interface Props {
step: AddInferencePipelineSteps;
setStep: React.Dispatch<React.SetStateAction<AddInferencePipelineSteps>>;
setStep: (step: AddInferencePipelineSteps) => void;
isDetailsStepValid: boolean;
isConfigureProcessorStepValid: boolean;
isConfigureProcessorStepValid?: boolean;
hasProcessorStep: boolean;
pipelineCreated: boolean;
}
const DISABLED = 'disabled';
const COMPLETE = 'complete';
const INCOMPLETE = 'incomplete';
export const AddInferencePipelineHorizontalSteps: FC<Props> = memo(
({ step, setStep, isDetailsStepValid, isConfigureProcessorStepValid }) => {
({
step,
setStep,
isDetailsStepValid,
isConfigureProcessorStepValid,
hasProcessorStep,
pipelineCreated,
}) => {
const currentStepIndex = steps.findIndex((s) => s === step);
const navSteps: EuiStepsHorizontalProps['steps'] = [
{
// Details
onClick: () => setStep(ADD_INFERENCE_PIPELINE_STEPS.DETAILS),
status: isDetailsStepValid ? 'complete' : 'incomplete',
onClick: () => {
if (pipelineCreated) return;
setStep(ADD_INFERENCE_PIPELINE_STEPS.DETAILS);
},
status: isDetailsStepValid ? COMPLETE : INCOMPLETE,
title: i18n.translate(
'xpack.ml.inferencePipeline.content.indices.transforms.addInferencePipelineModal.steps.details.title',
{
@ -37,29 +54,12 @@ export const AddInferencePipelineHorizontalSteps: FC<Props> = memo(
),
},
{
// Processor configuration
// Handle failures
onClick: () => {
if (!isDetailsStepValid) return;
setStep(ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR);
},
status:
isDetailsStepValid && isConfigureProcessorStepValid && currentStepIndex > 1
? 'complete'
: 'incomplete',
title: i18n.translate(
'xpack.ml.inferencePipeline.content.indices.transforms.addInferencePipelineModal.steps.configureProcessor.title',
{
defaultMessage: 'Configure processor',
}
),
},
{
// handle failures
onClick: () => {
if (!isDetailsStepValid) return;
if (!isDetailsStepValid || pipelineCreated) return;
setStep(ADD_INFERENCE_PIPELINE_STEPS.ON_FAILURE);
},
status: currentStepIndex > 2 ? 'complete' : 'incomplete',
status: currentStepIndex > 2 ? COMPLETE : INCOMPLETE,
title: i18n.translate(
'xpack.ml.inferencePipeline.content.indices.transforms.addInferencePipelineModal.steps.handleFailures.title',
{
@ -70,10 +70,10 @@ export const AddInferencePipelineHorizontalSteps: FC<Props> = memo(
{
// Test
onClick: () => {
if (!isConfigureProcessorStepValid || !isDetailsStepValid) return;
if (!isConfigureProcessorStepValid || !isDetailsStepValid || pipelineCreated) return;
setStep(ADD_INFERENCE_PIPELINE_STEPS.TEST);
},
status: currentStepIndex > 3 ? 'complete' : 'incomplete',
status: currentStepIndex > 3 ? COMPLETE : INCOMPLETE,
title: i18n.translate(
'xpack.ml.trainedModels.content.indices.transforms.addInferencePipelineModal.steps.test.title',
{
@ -84,10 +84,10 @@ export const AddInferencePipelineHorizontalSteps: FC<Props> = memo(
{
// Review and Create
onClick: () => {
if (!isConfigureProcessorStepValid) return;
if (!isConfigureProcessorStepValid || pipelineCreated) return;
setStep(ADD_INFERENCE_PIPELINE_STEPS.CREATE);
},
status: isDetailsStepValid && isConfigureProcessorStepValid ? 'incomplete' : 'disabled',
status: isDetailsStepValid && isConfigureProcessorStepValid ? INCOMPLETE : DISABLED,
title: i18n.translate(
'xpack.ml.inferencePipeline.content.indices.transforms.addInferencePipelineModal.steps.create.title',
{
@ -96,23 +96,60 @@ export const AddInferencePipelineHorizontalSteps: FC<Props> = memo(
),
},
];
if (hasProcessorStep === true) {
navSteps.splice(1, 0, {
// Processor configuration
onClick: () => {
if (!isDetailsStepValid || pipelineCreated) return;
setStep(ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR);
},
status:
isDetailsStepValid && isConfigureProcessorStepValid && currentStepIndex > 1
? COMPLETE
: INCOMPLETE,
title: i18n.translate(
'xpack.ml.inferencePipeline.content.indices.transforms.addInferencePipelineModal.steps.configureProcessor.title',
{
defaultMessage: 'Configure processor',
}
),
});
}
let DETAILS_INDEX: number;
let CONFIGURE_INDEX: number | undefined;
let ON_FAILURE_INDEX: number;
let TEST_INDEX: number;
let CREATE_INDEX: number;
if (hasProcessorStep) {
[DETAILS_INDEX, CONFIGURE_INDEX, ON_FAILURE_INDEX, TEST_INDEX, CREATE_INDEX] = [
0, 1, 2, 3, 4, 5,
];
} else {
[DETAILS_INDEX, ON_FAILURE_INDEX, TEST_INDEX, CREATE_INDEX] = [0, 1, 2, 3, 4];
}
switch (step) {
case ADD_INFERENCE_PIPELINE_STEPS.DETAILS:
navSteps[0].status = 'current';
navSteps[DETAILS_INDEX].status = 'current';
break;
case ADD_INFERENCE_PIPELINE_STEPS.CONFIGURE_PROCESSOR:
navSteps[1].status = 'current';
if (CONFIGURE_INDEX !== undefined) {
navSteps[CONFIGURE_INDEX].status = 'current';
}
break;
case ADD_INFERENCE_PIPELINE_STEPS.ON_FAILURE:
navSteps[2].status = 'current';
navSteps[ON_FAILURE_INDEX].status = 'current';
break;
case ADD_INFERENCE_PIPELINE_STEPS.TEST:
navSteps[3].status = 'current';
navSteps[TEST_INDEX].status = 'current';
break;
case ADD_INFERENCE_PIPELINE_STEPS.CREATE:
navSteps[4].status = 'current';
navSteps[CREATE_INDEX].status = 'current';
break;
}
return <EuiStepsHorizontal steps={navSteps} size="s" />;
}
);

View file

@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
export { AddInferencePipelineHorizontalSteps } from './add_inference_pipeline_horizontal_steps';
export { AddInferencePipelineFooter } from './add_inference_pipeline_footer';
export { ReviewAndCreatePipeline } from './review_and_create_pipeline';
export { OnFailureConfiguration } from './on_failure_configuration';
export { PipelineDetailsTitle } from './pipeline_details_title';
export { PipelineNameAndDescription } from './pipeline_name_and_description';

View file

@ -25,12 +25,12 @@ import { CodeEditor } from '@kbn/kibana-react-plugin/public';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import { SaveChangesButton } from './save_changes_button';
import type { MlInferenceState } from '../types';
import { getDefaultOnFailureConfiguration } from '../state';
import { CANCEL_EDIT_MESSAGE, EDIT_MESSAGE } from '../constants';
import { useMlKibana } from '../../../contexts/kibana';
import { isValidJson } from '../../../../../common/util/validation_utils';
import { SaveChangesButton } from '../ml_inference/components/save_changes_button';
import type { MlInferenceState } from '../ml_inference/types';
import { getDefaultOnFailureConfiguration } from '../ml_inference/state';
import { CANCEL_EDIT_MESSAGE, EDIT_MESSAGE } from '../ml_inference/constants';
import { useMlKibana } from '../../contexts/kibana';
import { isValidJson } from '../../../../common/util/validation_utils';
interface Props {
handleAdvancedConfigUpdate: (configUpdate: Partial<MlInferenceState>) => void;

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC } from 'react';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import { EuiCode, EuiLink, EuiSpacer, EuiTitle, EuiText } from '@elastic/eui';
import { useMlKibana } from '../../contexts/kibana';
interface Props {
modelId: string;
}
export const PipelineDetailsTitle: FC<Props> = ({ modelId }) => {
const {
services: {
docLinks: { links },
},
} = useMlKibana();
return (
<>
<EuiTitle size="s">
<h4>
{i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.title',
{ defaultMessage: 'Create a pipeline' }
)}
</h4>
</EuiTitle>
<EuiSpacer size="m" />
<EuiText color="subdued" size="s">
<p>
<FormattedMessage
id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.description"
defaultMessage="Build a {pipeline} to use the trained model - {modelId} - for inference."
values={{
modelId: <EuiCode>{modelId}</EuiCode>,
pipeline: (
<EuiLink external target="_blank" href={links.ingest.pipelines}>
pipeline
</EuiLink>
),
}}
/>
</p>
<p>
<FormattedMessage
id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.descriptionUsePipelines"
defaultMessage="Use {pipelineSimulateLink} or {reindexLink} to pass data into this pipeline. Predictions are stored in the Target field."
values={{
reindexLink: (
<EuiLink external target="_blank" href={links.upgradeAssistant.reindexWithPipeline}>
_reindex API
</EuiLink>
),
pipelineSimulateLink: (
<EuiLink external target="_blank" href={links.apis.simulatePipeline}>
pipeline/_simulate
</EuiLink>
),
}}
/>
</p>
</EuiText>
</>
);
};

View file

@ -0,0 +1,115 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC } from 'react';
import { i18n } from '@kbn/i18n';
import { EuiFieldText, EuiFormRow, EuiText, EuiTextArea } from '@elastic/eui';
interface Props {
handlePipelineConfigUpdate: (configUpdate: Partial<any>) => void;
pipelineNameError: string | undefined;
pipelineDescription: string;
pipelineName: string;
}
export const PipelineNameAndDescription: FC<Props> = ({
pipelineName,
pipelineNameError,
pipelineDescription,
handlePipelineConfigUpdate,
}) => {
const handleConfigChange = (value: string, type: string) => {
handlePipelineConfigUpdate({ [type]: value });
};
return (
<>
<EuiFormRow
fullWidth
label={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.nameLabel',
{
defaultMessage: 'Name',
}
)}
helpText={
!pipelineNameError && (
<EuiText size="xs">
{i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.name.helpText',
{
defaultMessage:
'Pipeline names are unique within a deployment and can only contain letters, numbers, underscores, and hyphens.',
}
)}
</EuiText>
)
}
error={pipelineNameError}
isInvalid={pipelineNameError !== undefined}
>
<EuiFieldText
data-test-subj="mlTrainedModelsInferencePipelineNameInput"
fullWidth
placeholder={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.namePlaceholder',
{
defaultMessage: 'Enter a unique name for this pipeline',
}
)}
value={pipelineName}
onChange={(e: React.ChangeEvent<HTMLInputElement>) =>
handleConfigChange(e.target.value, 'pipelineName')
}
/>
</EuiFormRow>
{/* DESCRIPTION */}
<EuiFormRow
fullWidth
label={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.descriptionLabel',
{
defaultMessage: 'Description',
}
)}
helpText={
<EuiText size="xs">
{i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.description.helpText',
{
defaultMessage: 'A description of the pipeline.',
}
)}
</EuiText>
}
>
<EuiTextArea
compressed
fullWidth
data-test-subj="mlTrainedModelsInferencePipelineDescriptionInput"
placeholder={i18n.translate(
'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.configure.descriptionPlaceholder',
{
defaultMessage: 'Add a pipeline description.',
}
)}
value={pipelineDescription}
onChange={(e: React.ChangeEvent<HTMLTextAreaElement>) =>
handleConfigChange(e.target.value, 'pipelineDescription')
}
/>
</EuiFormRow>
</>
);
};

View file

@ -7,6 +7,7 @@
import React, { FC, useMemo, useState } from 'react';
import { FormattedMessage } from '@kbn/i18n-react';
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import {
EuiAccordion,
@ -18,17 +19,28 @@ import {
EuiSpacer,
EuiTitle,
EuiText,
EuiTextColor,
htmlIdGenerator,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { IngestPipeline } from '@elastic/elasticsearch/lib/api/types';
import { useMlKibana } from '../../../contexts/kibana';
import { ReindexWithPipeline } from './reindex_with_pipeline';
import { useMlKibana } from '../../contexts/kibana';
import { ReindexWithPipeline } from '../ml_inference/components/reindex_with_pipeline';
const MANAGEMENT_APP_ID = 'management';
function getFieldFromPipelineConfig(config: estypes.IngestPipeline) {
const { processors } = config;
let field = '';
if (processors?.length) {
field = Object.keys(processors[0].inference?.field_map ?? {})[0];
}
return field;
}
interface Props {
highlightTargetField?: boolean;
inferencePipeline: IngestPipeline;
modelType?: string;
pipelineName: string;
@ -38,6 +50,7 @@ interface Props {
}
export const ReviewAndCreatePipeline: FC<Props> = ({
highlightTargetField = false,
inferencePipeline,
modelType,
pipelineName,
@ -62,6 +75,10 @@ export const ReviewAndCreatePipeline: FC<Props> = ({
: links.ingest.inferenceClassification;
const accordionId = useMemo(() => htmlIdGenerator()(), []);
const targetedField = useMemo(
() => getFieldFromPipelineConfig(inferencePipeline),
[inferencePipeline]
);
const configCodeBlock = useMemo(
() => (
@ -84,7 +101,7 @@ export const ReviewAndCreatePipeline: FC<Props> = ({
gutterSize="s"
data-test-subj="mlTrainedModelsInferenceReviewAndCreateStep"
>
<EuiFlexItem grow={3}>
<EuiFlexItem grow={2}>
{pipelineCreated === false ? (
<EuiTitle size="s">
<h4>
@ -189,20 +206,19 @@ export const ReviewAndCreatePipeline: FC<Props> = ({
) : null}
</>
</EuiFlexItem>
<EuiFlexItem grow={7}>
<EuiText color="subdued" size="s">
<p>
{!pipelineCreated ? (
<FormattedMessage
id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.description"
defaultMessage="This pipeline will be created with the configuration below."
/>
) : null}
</p>
</EuiText>
</EuiFlexItem>
{highlightTargetField ? (
<EuiFlexItem grow={2}>
<EuiText>
<FormattedMessage
id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.targetFieldDescription"
defaultMessage="The field {field} will be targeted by the model for evaluation."
values={{ field: <EuiTextColor color="accent">{targetedField}</EuiTextColor> }}
/>
</EuiText>
</EuiFlexItem>
) : null}
<EuiFlexItem grow>
{pipelineCreated && sourceIndex ? (
{pipelineCreated ? (
<>
<EuiSpacer size="m" />
<EuiAccordion

View file

@ -0,0 +1,193 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC, useMemo, useState } from 'react';
import {
EuiFlyout,
EuiFlyoutBody,
EuiFlyoutHeader,
EuiFlyoutFooter,
EuiSpacer,
EuiTitle,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { extractErrorProperties } from '@kbn/ml-error-utils';
import type { SupportedPytorchTasksType } from '@kbn/ml-trained-models-utils';
import { ModelItem } from '../models_list';
import type { AddInferencePipelineSteps } from '../../components/ml_inference/types';
import { ADD_INFERENCE_PIPELINE_STEPS } from '../../components/ml_inference/constants';
import { AddInferencePipelineFooter } from '../../components/shared';
import { AddInferencePipelineHorizontalSteps } from '../../components/shared';
import { getInitialState } from './state';
import { PipelineDetails } from './pipeline_details';
import { TestTrainedModel } from './test_trained_model';
import { OnFailureConfiguration } from '../../components/shared';
import { ReviewAndCreatePipeline } from '../../components/shared';
import { useMlApiContext } from '../../contexts/kibana';
import { getPipelineConfig } from './get_pipeline_config';
import { validateInferencePipelineConfigurationStep } from '../../components/ml_inference/validation';
import { type InferecePipelineCreationState } from './state';
import { useFetchPipelines } from '../../components/ml_inference/hooks/use_fetch_pipelines';
import { useTestTrainedModelsContext } from '../test_models/test_trained_models_context';
export interface CreatePipelineForModelFlyoutProps {
  // Called when the flyout closes; `refreshList` requests a refresh of the trained models list.
  onClose: (refreshList?: boolean) => void;
  // The trained model the inference ingest pipeline will be created for.
  model: ModelItem;
}
/**
 * Flyout for creating an inference ingest pipeline from a trained model.
 *
 * Opened from the 'Test model' flyout; the form is seeded with the pipeline
 * configuration used while testing the model (read from the test context) and
 * walks the user through the details, failure-handling, test and review steps.
 */
export const CreatePipelineForModelFlyout: FC<CreatePipelineForModelFlyoutProps> = ({
  onClose,
  model,
}) => {
  const {
    currentContext: { pipelineConfig },
  } = useTestTrainedModelsContext();
  // Initial form state derived from the model and the config used during testing.
  const initialState = useMemo(
    () => getInitialState(model, pipelineConfig),
    // eslint-disable-next-line react-hooks/exhaustive-deps
    [model.model_id, pipelineConfig]
  );
  const [formState, setFormState] = useState<InferecePipelineCreationState>(initialState);
  const [step, setStep] = useState<AddInferencePipelineSteps>(ADD_INFERENCE_PIPELINE_STEPS.DETAILS);
  const [hasUnsavedChanges, setHasUnsavedChanges] = useState<boolean>(false);
  // The first key of the model's inference_config identifies the task type.
  const taskType = useMemo(
    () => Object.keys(model.inference_config ?? {})[0],
    // eslint-disable-next-line react-hooks/exhaustive-deps
    [model.model_id]
  ) as SupportedPytorchTasksType;
  const {
    trainedModels: { createInferencePipeline },
  } = useMlApiContext();
  // Creates the pipeline from the current form state; failures are stored in
  // `pipelineError` and surfaced in the review step.
  const createPipeline = async () => {
    setFormState({ ...formState, creatingPipeline: true });
    try {
      const config = getPipelineConfig(formState);
      await createInferencePipeline(formState.pipelineName, config);
      setFormState({
        ...formState,
        pipelineCreated: true,
        creatingPipeline: false,
        pipelineError: undefined,
      });
    } catch (e) {
      // eslint-disable-next-line no-console
      console.error(e);
      const errorProperties = extractErrorProperties(e);
      setFormState({
        ...formState,
        creatingPipeline: false,
        pipelineError: errorProperties.message ?? e.message,
      });
    }
  };
  // Existing pipeline names — used to validate the new name for uniqueness.
  const pipelineNames = useFetchPipelines();
  // Shallow-merges a partial update into the form state.
  const handleConfigUpdate = (configUpdate: Partial<InferecePipelineCreationState>) => {
    const updatedState = { ...formState, ...configUpdate };
    setFormState(updatedState);
  };
  const handleSetStep = (currentStep: AddInferencePipelineSteps) => {
    setStep(currentStep);
  };
  const { pipelineName: pipelineNameError } = useMemo(() => {
    const errors = validateInferencePipelineConfigurationStep(
      formState.pipelineName,
      pipelineNames
    );
    return errors;
  }, [pipelineNames, formState.pipelineName]);
  return (
    // NOTE(review): closing always requests a models-list refresh
    // (refreshList=true), even when no pipeline was created — confirm intended.
    <EuiFlyout
      onClose={onClose.bind(null, true)}
      size="l"
      data-test-subj="mlTrainedModelsFromTestInferencePipelineFlyout"
    >
      <EuiFlyoutHeader>
        <EuiTitle size="m">
          <h3>
            {i18n.translate(
              'xpack.ml.trainedModels.content.indices.pipelines.createInferencePipeline.title',
              {
                defaultMessage: 'Create inference pipeline',
              }
            )}
          </h3>
        </EuiTitle>
      </EuiFlyoutHeader>
      <EuiFlyoutBody>
        <AddInferencePipelineHorizontalSteps
          step={step}
          setStep={handleSetStep}
          isDetailsStepValid={pipelineNameError === undefined}
          isConfigureProcessorStepValid={hasUnsavedChanges === false}
          hasProcessorStep={false}
          pipelineCreated={formState.pipelineCreated}
        />
        <EuiSpacer size="m" />
        {/* Step content — exactly one of details / on-failure / test / review renders. */}
        {step === ADD_INFERENCE_PIPELINE_STEPS.DETAILS && (
          <PipelineDetails
            handlePipelineConfigUpdate={handleConfigUpdate}
            pipelineName={formState.pipelineName}
            pipelineNameError={pipelineNameError}
            pipelineDescription={formState.pipelineDescription}
            modelId={model.model_id}
            taskType={taskType}
            initialPipelineConfig={formState.initialPipelineConfig}
            setHasUnsavedChanges={setHasUnsavedChanges}
          />
        )}
        {step === ADD_INFERENCE_PIPELINE_STEPS.ON_FAILURE && (
          <OnFailureConfiguration
            ignoreFailure={formState.ignoreFailure}
            takeActionOnFailure={formState.takeActionOnFailure}
            handleAdvancedConfigUpdate={handleConfigUpdate}
            onFailure={formState.onFailure}
          />
        )}
        {step === ADD_INFERENCE_PIPELINE_STEPS.TEST && (
          <TestTrainedModel
            model={model}
            handlePipelineConfigUpdate={handleConfigUpdate}
            externalPipelineConfig={getPipelineConfig(formState)}
          />
        )}
        {step === ADD_INFERENCE_PIPELINE_STEPS.CREATE && (
          <ReviewAndCreatePipeline
            highlightTargetField
            inferencePipeline={getPipelineConfig(formState)}
            pipelineName={formState.pipelineName}
            pipelineCreated={formState.pipelineCreated}
            pipelineError={formState.pipelineError}
          />
        )}
      </EuiFlyoutBody>
      <EuiFlyoutFooter className="mlTrainedModelsInferencePipelineFlyoutFooter">
        <AddInferencePipelineFooter
          onClose={onClose}
          onCreate={createPipeline}
          step={step}
          setStep={handleSetStep}
          isDetailsStepValid={pipelineNameError === undefined}
          isConfigureProcessorStepValid={hasUnsavedChanges === false}
          pipelineCreated={formState.pipelineCreated}
          creatingPipeline={formState.creatingPipeline}
          hasProcessorStep={false}
        />
      </EuiFlyoutFooter>
    </EuiFlyout>
  );
};

View file

@ -0,0 +1,118 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import {
IngestInferenceProcessor,
IngestInferenceConfig,
} from '@elastic/elasticsearch/lib/api/types';
import { isPopulatedObject } from '@kbn/ml-is-populated-object';
import { SUPPORTED_PYTORCH_TASKS } from '@kbn/ml-trained-models-utils';
import { DEFAULT_INPUT_FIELD } from '../test_models/models/inference_base';
// Key under which the extracted input field is stored in the returned properties object.
const INPUT_FIELD = 'inputField';
// Task-specific inference_config properties extracted for each supported task type.
const ZERO_SHOT_CLASSIFICATION_PROPERTIES = ['labels', 'multi_label'] as const;
const QUESTION_ANSWERING_PROPERTIES = ['question'] as const;
// Maps each supported pytorch task type to the config properties it exposes.
const MODEL_INFERENCE_CONFIG_PROPERTIES = {
  [SUPPORTED_PYTORCH_TASKS.QUESTION_ANSWERING]: QUESTION_ANSWERING_PROPERTIES,
  [SUPPORTED_PYTORCH_TASKS.ZERO_SHOT_CLASSIFICATION]: ZERO_SHOT_CLASSIFICATION_PROPERTIES,
} as const;
// Union of the task types that have extractable config properties.
type SupportedModelInferenceConfigPropertiesType = keyof typeof MODEL_INFERENCE_CONFIG_PROPERTIES;
// Inference processor whose inference_config carries task-specific options.
interface MLIngestInferenceProcessor extends IngestInferenceProcessor {
  inference_config: MLInferencePipelineInferenceConfig;
}
// Currently, estypes doesn't include pipeline processor types with the trained model processors
type MLInferencePipelineInferenceConfig = IngestInferenceConfig & {
  zero_shot_classification?: estypes.MlZeroShotClassificationInferenceOptions;
  question_answering?: estypes.MlQuestionAnsweringInferenceUpdateOptions;
};
// Shape returned by getInferencePropertiesFromPipelineConfig below.
interface GetInferencePropertiesFromPipelineConfigReturnType {
  inputField: string;
  inferenceConfig?: MLInferencePipelineInferenceConfig;
  inferenceObj?: IngestInferenceProcessor | MLIngestInferenceProcessor;
  fieldMap?: IngestInferenceProcessor['field_map'];
  // Task-specific properties (see MODEL_INFERENCE_CONFIG_PROPERTIES).
  labels?: string[];
  multi_label?: boolean;
  question?: string;
}
/**
 * Type guard: true when `arg` names one of the task types listed in
 * MODEL_INFERENCE_CONFIG_PROPERTIES.
 */
function isSupportedInferenceConfigPropertyType(
  arg: unknown
): arg is SupportedModelInferenceConfigPropertiesType {
  if (typeof arg !== 'string') {
    return false;
  }
  const supportedTypes = Object.keys(MODEL_INFERENCE_CONFIG_PROPERTIES);
  return supportedTypes.includes(arg);
}
/**
 * Type guard: true when the config carries task-specific settings for a
 * supported pytorch task (question answering or zero shot classification).
 */
export function isMlInferencePipelineInferenceConfig(
  arg: unknown
): arg is MLInferencePipelineInferenceConfig {
  const supportedTasks = [
    SUPPORTED_PYTORCH_TASKS.QUESTION_ANSWERING,
    SUPPORTED_PYTORCH_TASKS.ZERO_SHOT_CLASSIFICATION,
  ];
  return supportedTasks.some((task) => isPopulatedObject(arg, [task]));
}
/**
 * Type guard: true when `arg` is an inference processor whose inference_config
 * contains task-specific settings for a supported pytorch task.
 */
export function isMlIngestInferenceProcessor(arg: unknown): arg is MLIngestInferenceProcessor {
  if (!isPopulatedObject(arg) || !arg.hasOwnProperty('inference_config')) {
    return false;
  }
  return (
    isPopulatedObject(arg.inference_config, [SUPPORTED_PYTORCH_TASKS.QUESTION_ANSWERING]) ||
    isPopulatedObject(arg.inference_config, [SUPPORTED_PYTORCH_TASKS.ZERO_SHOT_CLASSIFICATION])
  );
}
/**
 * Extracts the inference-related settings from an ingest pipeline configuration.
 *
 * Walks the pipeline's processors and, for each one containing an `inference`
 * section, collects the input field, field map, inference config and any
 * task-specific properties (labels / multi_label / question).
 *
 * @param type - The model task type (e.g. question answering).
 * @param pipelineConfig - The ingest pipeline to inspect.
 * @returns The collected properties; `inputField` is '' when none was found.
 */
export function getInferencePropertiesFromPipelineConfig(
  type: string,
  pipelineConfig: estypes.IngestPipeline
): GetInferencePropertiesFromPipelineConfigReturnType {
  const propertiesToReturn: GetInferencePropertiesFromPipelineConfigReturnType = {
    [INPUT_FIELD]: '',
  };
  // NOTE(review): if several processors contain an inference section, the
  // properties of the last one win — confirm this is the intended behavior.
  pipelineConfig.processors?.forEach((processor) => {
    const { inference } = processor;
    if (inference) {
      propertiesToReturn.inferenceObj = inference;
      // Get the input field
      if (inference.field_map) {
        propertiesToReturn.fieldMap = inference.field_map;
        // Prefer the source field that maps to the model's default input field.
        for (const [key, value] of Object.entries(inference.field_map)) {
          if (value === DEFAULT_INPUT_FIELD) {
            propertiesToReturn[INPUT_FIELD] = key;
          }
        }
        if (propertiesToReturn[INPUT_FIELD] === '') {
          // If not found, set to the first field in the field map
          propertiesToReturn[INPUT_FIELD] = Object.keys(inference.field_map)[0];
        }
      }
      propertiesToReturn.inferenceConfig = inference.inference_config;
      // Get the properties associated with the type of model/task
      if (
        isMlInferencePipelineInferenceConfig(propertiesToReturn.inferenceConfig) &&
        isSupportedInferenceConfigPropertyType(type)
      ) {
        MODEL_INFERENCE_CONFIG_PROPERTIES[type]?.forEach((property) => {
          const configSettings =
            propertiesToReturn.inferenceConfig && propertiesToReturn.inferenceConfig[type];
          propertiesToReturn[property] =
            configSettings && configSettings.hasOwnProperty(property)
              ? // @ts-ignore
                configSettings[property]
              : undefined;
        });
      }
    }
  });
  return propertiesToReturn;
}

View file

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import type { InferecePipelineCreationState } from './state';
/**
 * Builds the ingest pipeline configuration used for the create-pipeline request.
 *
 * Reuses the inference processor from the initial pipeline config (the config
 * used while testing the model) when present, layering the form's failure
 * handling settings and the model id on top.
 *
 * @param state - The pipeline creation form state.
 * @returns The ingest pipeline config to submit.
 */
export function getPipelineConfig(state: InferecePipelineCreationState): estypes.IngestPipeline {
  const { ignoreFailure, modelId, onFailure, pipelineDescription, initialPipelineConfig } = state;
  // First processor from the seeded config, if any.
  const existingProcessor = initialPipelineConfig?.processors?.length
    ? initialPipelineConfig.processors[0]
    : {};
  const hasOnFailure = !!onFailure && Object.keys(onFailure).length > 0;
  // Only carry over inference settings when the seeded processor has them.
  const inference = existingProcessor?.inference
    ? {
        ...existingProcessor.inference,
        ignore_failure: ignoreFailure,
        on_failure: hasOnFailure ? onFailure : undefined,
      }
    : {};
  return {
    description: pipelineDescription,
    processors: [
      {
        inference: {
          ...inference,
          model_id: modelId,
        },
      },
    ],
  };
}

View file

@ -0,0 +1,183 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC, memo, useState } from 'react';
import {
EuiButtonEmpty,
EuiCodeBlock,
EuiFlexGroup,
EuiFlexItem,
EuiForm,
EuiFormRow,
EuiPanel,
} from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { CodeEditor } from '@kbn/kibana-react-plugin/public';
import type { SupportedPytorchTasksType } from '@kbn/ml-trained-models-utils';
import { type InferecePipelineCreationState } from './state';
import { EDIT_MESSAGE, CANCEL_EDIT_MESSAGE } from '../../components/ml_inference/constants';
import { isValidJson } from '../../../../common/util/validation_utils';
import { useTestTrainedModelsContext } from '../test_models/test_trained_models_context';
import { SaveChangesButton } from '../../components/ml_inference/components/save_changes_button';
import { validatePipelineProcessors } from '../../components/ml_inference/validation';
import { PipelineDetailsTitle, PipelineNameAndDescription } from '../../components/shared';
interface Props {
  // Propagates pipeline config changes back to the flyout's form state.
  handlePipelineConfigUpdate: (configUpdate: Partial<InferecePipelineCreationState>) => void;
  modelId: string;
  pipelineNameError: string | undefined;
  pipelineName: string;
  pipelineDescription: string;
  // Processor config used to seed the JSON editor.
  initialPipelineConfig?: InferecePipelineCreationState['initialPipelineConfig'];
  // Signals the parent that the editor holds unsaved or invalid changes.
  setHasUnsavedChanges: React.Dispatch<React.SetStateAction<boolean>>;
  taskType?: SupportedPytorchTasksType;
}
/**
 * 'Details' step of the pipeline creation flyout.
 *
 * Lets the user set the pipeline name and description, and view/edit the
 * processor configuration JSON seeded from the config used when testing the
 * model. Edits are validated before being written back to the form state.
 */
export const PipelineDetails: FC<Props> = memo(
  ({
    handlePipelineConfigUpdate,
    modelId,
    pipelineName,
    pipelineNameError,
    pipelineDescription,
    initialPipelineConfig,
    setHasUnsavedChanges,
    taskType,
  }) => {
    const [isProcessorConfigValid, setIsProcessorConfigValid] = useState<boolean>(true);
    const [processorConfigError, setProcessorConfigError] = useState<string | undefined>();
    const {
      currentContext: { pipelineConfig },
    } = useTestTrainedModelsContext();
    // The config shown/edited as a JSON string.
    const [processorConfigString, setProcessorConfigString] = useState<string>(
      JSON.stringify(initialPipelineConfig ?? {}, null, 2)
    );
    const [editProcessorConfig, setEditProcessorConfig] = useState<boolean>(false);
    // Validates the edited JSON and, when valid, commits it to the form state.
    const updateProcessorConfig = () => {
      const invalidProcessorConfigMessage = validatePipelineProcessors(
        JSON.parse(processorConfigString),
        taskType
      );
      if (invalidProcessorConfigMessage === undefined) {
        handlePipelineConfigUpdate({ initialPipelineConfig: JSON.parse(processorConfigString) });
        setHasUnsavedChanges(false);
        setEditProcessorConfig(false);
        setProcessorConfigError(undefined);
      } else {
        setHasUnsavedChanges(true);
        setIsProcessorConfigValid(false);
        setProcessorConfigError(invalidProcessorConfigMessage);
      }
    };
    const handleProcessorConfigChange = (json: string) => {
      setProcessorConfigString(json);
      const valid = isValidJson(json);
      setIsProcessorConfigValid(valid);
    };
    // NOTE(review): reset restores the config from the test context, not the
    // `initialPipelineConfig` prop used to seed the editor — confirm intended.
    const resetProcessorConfig = () => {
      setProcessorConfigString(JSON.stringify(pipelineConfig, null, 2));
      setIsProcessorConfigValid(true);
      setProcessorConfigError(undefined);
    };
    return (
      <EuiFlexGroup>
        <EuiFlexItem grow={3}>
          <PipelineDetailsTitle modelId={modelId} />
        </EuiFlexItem>
        <EuiFlexItem grow={7}>
          <EuiPanel hasBorder={false} hasShadow={false}>
            {/* NAME */}
            <EuiForm component="form">
              {/* NAME and DESCRIPTION */}
              <PipelineNameAndDescription
                pipelineName={pipelineName}
                pipelineDescription={pipelineDescription}
                pipelineNameError={pipelineNameError}
                handlePipelineConfigUpdate={handlePipelineConfigUpdate}
              />
              {/* PROCESSOR CONFIGURATION */}
              <EuiFormRow
                fullWidth
                labelAppend={
                  <EuiFlexGroup gutterSize="xs" alignItems="center">
                    <EuiFlexItem grow={false}>
                      <EuiButtonEmpty
                        data-test-subj="mlTrainedModelsInferencePipelineInferenceConfigEditButton"
                        iconType="pencil"
                        size="xs"
                        onClick={() => {
                          // Leaving edit mode discards any pending error state.
                          const editingState = !editProcessorConfig;
                          if (editingState === false) {
                            setProcessorConfigError(undefined);
                            setIsProcessorConfigValid(true);
                            setHasUnsavedChanges(false);
                          }
                          setEditProcessorConfig(editingState);
                        }}
                      >
                        {editProcessorConfig ? CANCEL_EDIT_MESSAGE : EDIT_MESSAGE}
                      </EuiButtonEmpty>
                    </EuiFlexItem>
                    <EuiFlexItem grow={false}>
                      {editProcessorConfig ? (
                        <SaveChangesButton
                          onClick={updateProcessorConfig}
                          disabled={isProcessorConfigValid === false}
                        />
                      ) : null}
                    </EuiFlexItem>
                    <EuiFlexItem grow={false}>
                      {editProcessorConfig ? (
                        <EuiButtonEmpty size="xs" onClick={resetProcessorConfig}>
                          {i18n.translate(
                            'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.advanced.resetInferenceConfigButton',
                            { defaultMessage: 'Reset' }
                          )}
                        </EuiButtonEmpty>
                      ) : null}
                    </EuiFlexItem>
                  </EuiFlexGroup>
                }
                error={processorConfigError}
                isInvalid={processorConfigError !== undefined}
                data-test-subj="mlTrainedModelsInferencePipelineInferenceConfigEditor"
              >
                {editProcessorConfig ? (
                  <CodeEditor
                    height={300}
                    languageId="json"
                    options={{
                      automaticLayout: true,
                      lineNumbers: 'off',
                      tabSize: 2,
                    }}
                    value={processorConfigString}
                    onChange={handleProcessorConfigChange}
                  />
                ) : (
                  <EuiCodeBlock
                    isCopyable={true}
                    data-test-subj="mlTrainedModelsInferencePipelineInferenceConfigBlock"
                  >
                    {processorConfigString}
                  </EuiCodeBlock>
                )}
              </EuiFormRow>
            </EuiForm>
          </EuiPanel>
        </EuiFlexItem>
      </EuiFlexGroup>
    );
  }
);

View file

@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { IngestInferenceProcessor } from '@elastic/elasticsearch/lib/api/types';
import { getDefaultOnFailureConfiguration } from '../../components/ml_inference/state';
import type { ModelItem } from '../models_list';
export interface InferecePipelineCreationState {
  // True while the create-pipeline request is in flight.
  creatingPipeline: boolean;
  error: boolean;
  // Value for the inference processor's ignore_failure setting.
  ignoreFailure: boolean;
  modelId: string;
  // on_failure processors applied to the inference processor, if any.
  onFailure?: IngestInferenceProcessor['on_failure'];
  pipelineName: string;
  pipelineNameError?: string;
  pipelineDescription: string;
  // True once the pipeline has been created successfully.
  pipelineCreated: boolean;
  // Error message from a failed create request, if any.
  pipelineError?: string;
  // Pipeline config used to seed the processor editor (e.g. from the test flyout).
  initialPipelineConfig?: estypes.IngestPipeline;
  takeActionOnFailure: boolean;
}
/**
 * Builds the initial form state for the pipeline creation flyout.
 *
 * @param model - The trained model the pipeline is being created for.
 * @param initialPipelineConfig - Pipeline config to seed the processor editor,
 *   typically the config used while testing the model.
 * @returns The default InferecePipelineCreationState for the model.
 */
export const getInitialState = (
  model: ModelItem,
  initialPipelineConfig: estypes.IngestPipeline | undefined
): InferecePipelineCreationState => {
  const modelId = model.model_id;
  return {
    creatingPipeline: false,
    error: false,
    ignoreFailure: false,
    modelId,
    onFailure: getDefaultOnFailureConfiguration(),
    pipelineDescription: `Uses the pre-trained model ${modelId} to infer against the data that is being ingested in the pipeline`,
    pipelineName: `ml-inference-${modelId}`,
    pipelineCreated: false,
    initialPipelineConfig,
    takeActionOnFailure: true,
  };
};

View file

@ -0,0 +1,79 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC } from 'react';
import { EuiFlexGroup, EuiFlexItem, EuiLink, EuiSpacer, EuiText, EuiTitle } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { ModelItem } from '../models_list';
import { TestTrainedModelContent } from '../test_models/test_trained_model_content';
import { useMlKibana } from '../../contexts/kibana';
import { type InferecePipelineCreationState } from './state';
interface ContentProps {
  // The trained model being tested.
  model: ModelItem;
  // Propagates pipeline config changes back to the flyout's form state.
  handlePipelineConfigUpdate: (configUpdate: Partial<InferecePipelineCreationState>) => void;
  // Pipeline config built from the current form state, used for the test run.
  externalPipelineConfig?: estypes.IngestPipeline;
}
/**
 * 'Test' step of the pipeline creation flyout.
 *
 * Renders an explanation column alongside TestTrainedModelContent, which lets
 * the user run the model against input text or a field of an existing index.
 */
export const TestTrainedModel: FC<ContentProps> = ({
  model,
  handlePipelineConfigUpdate,
  externalPipelineConfig,
}) => {
  const {
    services: {
      docLinks: { links },
    },
  } = useMlKibana();
  return (
    <EuiFlexGroup>
      <EuiFlexItem grow={3}>
        <EuiTitle size="s">
          <h4>
            {i18n.translate(
              'xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.advanced.testTrainedModelTitle',
              { defaultMessage: 'Try it out' }
            )}
          </h4>
        </EuiTitle>
        <EuiSpacer size="m" />
        <EuiText color="subdued" size="s">
          <p>
            <FormattedMessage
              id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.advanced.testTrainedModelExplanation"
              defaultMessage="Test the model to ensure it's working properly."
            />
          </p>
          <p>
            <FormattedMessage
              id="xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.advanced.testTrainedModelDescription"
              defaultMessage="Test the model against new data by either providing input text or using a field of an existing index. Alternatively, you can use the infer trained model API. {inferTrainedModelApiLink}."
              values={{
                inferTrainedModelApiLink: (
                  // NOTE(review): link text is hard-coded English — consider
                  // i18n per the project's writing guidelines.
                  <EuiLink external target="_blank" href={links.apis.inferTrainedModel}>
                    Learn more.
                  </EuiLink>
                ),
              }}
            />
          </p>
        </EuiText>
      </EuiFlexItem>
      <EuiFlexItem grow={7}>
        <TestTrainedModelContent
          model={model}
          handlePipelineConfigUpdate={handlePipelineConfigUpdate}
          externalPipelineConfig={externalPipelineConfig}
        />
      </EuiFlexItem>
    </EuiFlexGroup>
  );
};

View file

@ -69,7 +69,7 @@ import { useToastNotificationService } from '../services/toast_notification_serv
import { useFieldFormatter } from '../contexts/kibana/use_field_formatter';
import { useRefresh } from '../routing/use_refresh';
import { SavedObjectsWarning } from '../components/saved_objects_warning';
import { TestTrainedModelFlyout } from './test_models';
import { TestModelAndPipelineCreationFlyout } from './test_models';
import { TestDfaModelsFlyout } from './test_dfa_models_flyout';
import { AddInferencePipelineFlyout } from '../components/ml_inference';
import { useEnabledFeatures } from '../contexts/ml';
@ -819,7 +819,15 @@ export const ModelsList: FC<Props> = ({
/>
)}
{modelToTest === null ? null : (
<TestTrainedModelFlyout model={modelToTest} onClose={setModelToTest.bind(null, null)} />
<TestModelAndPipelineCreationFlyout
model={modelToTest}
onClose={(refreshList?: boolean) => {
setModelToTest(null);
if (refreshList) {
fetchModelsData();
}
}}
/>
)}
{dfaModelToTest === null ? null : (
<TestDfaModelsFlyout model={dfaModelToTest} onClose={setDfaModelToTest.bind(null, null)} />

View file

@ -5,5 +5,5 @@
* 2.0.
*/
export { TestTrainedModelFlyout } from './test_flyout';
export { TestModelAndPipelineCreationFlyout } from './test_model_and_pipeline_creation_flyout';
export { isTestable, isDfaTrainedModel } from './utils';

View file

@ -10,7 +10,15 @@ import React, { FC, useState, useMemo, useEffect, useCallback } from 'react';
import useObservable from 'react-use/lib/useObservable';
import { firstValueFrom } from 'rxjs';
import { DataView } from '@kbn/data-views-plugin/common';
import { EuiSpacer, EuiSelect, EuiFormRow, EuiAccordion, EuiCodeBlock } from '@elastic/eui';
import {
EuiAccordion,
EuiCode,
EuiCodeBlock,
EuiFormRow,
EuiSpacer,
EuiSelect,
EuiText,
} from '@elastic/eui';
import { isPopulatedObject } from '@kbn/ml-is-populated-object';
import { i18n } from '@kbn/i18n';
@ -21,9 +29,14 @@ import type { InferrerType } from '.';
interface Props {
inferrer: InferrerType;
data: ReturnType<typeof useIndexInput>;
disableIndexSelection: boolean;
}
export const InferenceInputFormIndexControls: FC<Props> = ({ inferrer, data }) => {
export const InferenceInputFormIndexControls: FC<Props> = ({
inferrer,
data,
disableIndexSelection,
}) => {
const {
dataViewListItems,
fieldNames,
@ -40,14 +53,25 @@ export const InferenceInputFormIndexControls: FC<Props> = ({ inferrer, data }) =
return (
<>
<EuiFormRow label="Index" fullWidth>
<EuiSelect
options={dataViewListItems}
value={selectedDataViewId}
onChange={(e) => setSelectedDataViewId(e.target.value)}
hasNoInitialSelection={true}
disabled={runningState === RUNNING_STATE.RUNNING}
fullWidth
/>
{disableIndexSelection ? (
<EuiText grow={false}>
<EuiCode>
{dataViewListItems.find((item) => item.value === selectedDataViewId)?.text}
</EuiCode>
</EuiText>
) : (
<EuiSelect
options={dataViewListItems}
value={selectedDataViewId}
onChange={(e) => {
inferrer.setSelectedDataViewId(e.target.value);
setSelectedDataViewId(e.target.value);
}}
hasNoInitialSelection={true}
disabled={runningState === RUNNING_STATE.RUNNING}
fullWidth
/>
)}
</EuiFormRow>
<EuiSpacer size="m" />
<EuiFormRow
@ -59,7 +83,9 @@ export const InferenceInputFormIndexControls: FC<Props> = ({ inferrer, data }) =
<EuiSelect
options={fieldNames}
value={selectedField}
onChange={(e) => setSelectedField(e.target.value)}
onChange={(e) => {
setSelectedField(e.target.value);
}}
hasNoInitialSelection={true}
disabled={runningState === RUNNING_STATE.RUNNING}
fullWidth
@ -79,7 +105,14 @@ export const InferenceInputFormIndexControls: FC<Props> = ({ inferrer, data }) =
}
)}
>
<EuiCodeBlock language="json" fontSize="s" paddingSize="s" lineNumbers isCopyable={true}>
<EuiCodeBlock
language="json"
fontSize="s"
paddingSize="s"
lineNumbers
isCopyable={true}
overflowHeight={300}
>
{JSON.stringify(pipeline, null, 2)}
</EuiCodeBlock>
</EuiAccordion>
@ -87,7 +120,13 @@ export const InferenceInputFormIndexControls: FC<Props> = ({ inferrer, data }) =
);
};
export function useIndexInput({ inferrer }: { inferrer: InferrerType }) {
export function useIndexInput({
inferrer,
defaultSelectedDataViewId,
}: {
inferrer: InferrerType;
defaultSelectedDataViewId?: string;
}) {
const {
services: {
data: {
@ -100,7 +139,9 @@ export function useIndexInput({ inferrer }: { inferrer: InferrerType }) {
const [dataViewListItems, setDataViewListItems] = useState<
Array<{ value: string; text: string }>
>([]);
const [selectedDataViewId, setSelectedDataViewId] = useState<string | undefined>(undefined);
const [selectedDataViewId, setSelectedDataViewId] = useState<string | undefined>(
defaultSelectedDataViewId
);
const [selectedDataView, setSelectedDataView] = useState<DataView | null>(null);
const [fieldNames, setFieldNames] = useState<Array<{ value: string; text: string }>>([]);
const selectedField = useObservable(inferrer.getInputField$(), inferrer.getInputField());
@ -197,11 +238,20 @@ export function useIndexInput({ inferrer }: { inferrer: InferrerType }) {
}));
setFieldNames(tempFieldNames);
const fieldName = tempFieldNames.length === 1 ? tempFieldNames[0].value : undefined;
const defaultSelectedField = inferrer.getInputField();
const fieldName =
defaultSelectedField &&
tempFieldNames.find((field) => field.value === defaultSelectedField)
? defaultSelectedField
: tempFieldNames[0].value;
// Only set a field if it's the default field
// if (inferrer.getInputField() === DEFAULT_INPUT_FIELD) {
inferrer.setInputField(fieldName);
// }
}
},
[selectedDataView, inferrer]
[selectedDataView, inferrer] // defaultSelectedField
);
useEffect(

View file

@ -31,7 +31,7 @@ export type InferenceOptions =
| estypes.MlTextEmbeddingInferenceOptions
| estypes.MlQuestionAnsweringInferenceUpdateOptions;
const DEFAULT_INPUT_FIELD = 'text_field';
export const DEFAULT_INPUT_FIELD = 'text_field';
export const DEFAULT_INFERENCE_TIME_OUT = '30s';
export type FormattedNerResponse = Array<{
@ -72,8 +72,10 @@ export abstract class InferenceBase<TInferResponse> {
private isValid$ = new BehaviorSubject<boolean>(false);
private pipeline$ = new BehaviorSubject<estypes.IngestPipeline>({});
private supportedFieldTypes: ES_FIELD_TYPES[] = [ES_FIELD_TYPES.TEXT];
private selectedDataViewId: string | undefined;
protected readonly info: string[] = [];
public switchToCreationMode?: () => void;
private subscriptions$: Subscription = new Subscription();
@ -87,8 +89,13 @@ export abstract class InferenceBase<TInferResponse> {
this.inputField$.next(this.modelInputField);
}
public setSwitchtoCreationMode(callback: () => void) {
this.switchToCreationMode = callback;
}
public destroy() {
this.subscriptions$.unsubscribe();
this.pipeline$.unsubscribe();
}
protected initialize(
@ -162,6 +169,15 @@ export abstract class InferenceBase<TInferResponse> {
this.runningState$.next(RUNNING_STATE.STOPPED);
}
public setSelectedDataViewId(dataViewId: string) {
// Data view selected for testing
this.selectedDataViewId = dataViewId;
}
public getSelectedDataViewId() {
return this.selectedDataViewId;
}
public setInputField(field: string | undefined) {
// if the field is not set, change to be the same as the model input field
this.inputField$.next(field === undefined ? this.modelInputField : field);

View file

@ -11,7 +11,6 @@ import useObservable from 'react-use/lib/useObservable';
import {
EuiSpacer,
EuiButton,
EuiButtonEmpty,
EuiFlexGroup,
EuiFlexItem,
@ -28,13 +27,22 @@ import { ErrorMessage } from '../../inference_error';
import type { InferrerType } from '..';
import { useIndexInput, InferenceInputFormIndexControls } from '../index_input';
import { RUNNING_STATE } from '../inference_base';
import { InputFormControls } from './input_form_controls';
import { useTestTrainedModelsContext } from '../../test_trained_models_context';
interface Props {
inferrer: InferrerType;
}
export const IndexInputForm: FC<Props> = ({ inferrer }) => {
const data = useIndexInput({ inferrer });
const {
currentContext: { defaultSelectedDataViewId, createPipelineFlyoutOpen },
} = useTestTrainedModelsContext();
const data = useIndexInput({
inferrer,
defaultSelectedDataViewId,
});
const { reloadExamples, selectedField } = data;
const [errorText, setErrorText] = useState<string | null>(null);
@ -60,23 +68,26 @@ export const IndexInputForm: FC<Props> = ({ inferrer }) => {
return (
<EuiForm component={'form'} onSubmit={run}>
<>{infoComponent}</>
<InferenceInputFormIndexControls inferrer={inferrer} data={data} />
<InferenceInputFormIndexControls
inferrer={inferrer}
data={data}
disableIndexSelection={defaultSelectedDataViewId !== undefined}
/>
<EuiSpacer size="m" />
<EuiFlexGroup>
<EuiFlexItem grow={false}>
<EuiButton
disabled={runningState === RUNNING_STATE.RUNNING || isValid === false}
fullWidth={false}
data-test-subj={'mlTestModelTestButton'}
type={'submit'}
>
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.inferenceInputForm.runButton"
defaultMessage="Test"
<EuiFlexGroup>
<InputFormControls
testButtonDisabled={runningState === RUNNING_STATE.RUNNING || isValid === false}
createPipelineButtonDisabled={
runningState === RUNNING_STATE.RUNNING || isValid === false
}
showCreatePipelineButton={!createPipelineFlyoutOpen}
inferrer={inferrer}
/>
</EuiButton>
</EuiFlexGroup>
</EuiFlexItem>
<EuiFlexItem grow={false}>
{runningState === RUNNING_STATE.RUNNING ? <EuiLoadingSpinner size="xl" /> : null}

View file

@ -0,0 +1,63 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC } from 'react';
import { EuiButton, EuiButtonEmpty, EuiFlexItem } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n-react';
import type { InferrerType } from '..';
interface Props {
  // Disables the 'Test' submit button.
  testButtonDisabled: boolean;
  // Disables the 'Create pipeline' button.
  createPipelineButtonDisabled: boolean;
  inferrer: InferrerType;
  // When true, the 'Create pipeline' button is rendered alongside 'Test'.
  showCreatePipelineButton?: boolean;
}
/**
 * Shared controls for the test-model input forms: the 'Test' submit button
 * and, optionally, a 'Create pipeline' button that switches the inferrer
 * into pipeline creation mode.
 */
export const InputFormControls: FC<Props> = ({
  testButtonDisabled,
  createPipelineButtonDisabled,
  inferrer,
  showCreatePipelineButton,
}) => {
  return (
    <>
      <EuiFlexItem grow={false}>
        {/* Submits the enclosing form, triggering the model test run. */}
        <EuiButton
          disabled={testButtonDisabled}
          fullWidth={false}
          data-test-subj={'mlTestModelTestButton'}
          type={'submit'}
        >
          <FormattedMessage
            id="xpack.ml.trainedModels.testModelsFlyout.inferenceInputForm.runButton"
            defaultMessage="Test"
          />
        </EuiButton>
      </EuiFlexItem>
      {showCreatePipelineButton ? (
        <EuiFlexItem grow={false}>
          <EuiButtonEmpty
            disabled={createPipelineButtonDisabled}
            data-test-subj={'mlTestModelCreatePipelineButton'}
            onClick={() => {
              // switchToCreationMode is registered via setSwitchtoCreationMode
              // and may be undefined when no handler was set.
              if (inferrer.switchToCreationMode) {
                inferrer.switchToCreationMode();
              }
            }}
          >
            <FormattedMessage
              id="xpack.ml.trainedModels.testModelsFlyout.inferenceInputForm.createPipelineButton"
              defaultMessage="Create pipeline"
            />
          </EuiButtonEmpty>
        </EuiFlexItem>
      ) : null}
    </>
  );
};

View file

@ -8,7 +8,7 @@
import React, { FC, useState, useMemo, useCallback, FormEventHandler } from 'react';
import useObservable from 'react-use/lib/useObservable';
import { EuiSpacer, EuiButton, EuiTabs, EuiTab, EuiForm } from '@elastic/eui';
import { EuiFlexGroup, EuiSpacer, EuiTabs, EuiTab, EuiForm } from '@elastic/eui';
import { FormattedMessage } from '@kbn/i18n-react';
import { extractErrorMessage } from '@kbn/ml-error-utils';
@ -18,6 +18,7 @@ import type { InferrerType } from '..';
import { OutputLoadingContent } from '../../output_loading';
import { RUNNING_STATE } from '../inference_base';
import { RawOutput } from '../raw_output';
import { InputFormControls } from './input_form_controls';
interface Props {
inferrer: InferrerType;
@ -57,17 +58,15 @@ export const TextInputForm: FC<Props> = ({ inferrer }) => {
<>{inputComponent}</>
<EuiSpacer size="m" />
<div>
<EuiButton
disabled={runningState === RUNNING_STATE.RUNNING || isValid === false}
fullWidth={false}
data-test-subj={'mlTestModelTestButton'}
type="submit"
>
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.inferenceInputForm.runButton"
defaultMessage="Test"
<EuiFlexGroup>
<InputFormControls
testButtonDisabled={runningState === RUNNING_STATE.RUNNING || isValid === false}
createPipelineButtonDisabled={
runningState === RUNNING_STATE.RUNNING || isValid === false
}
inferrer={inferrer}
/>
</EuiButton>
</EuiFlexGroup>
</div>
{runningState !== RUNNING_STATE.STOPPED ? (
<>

View file

@ -7,6 +7,7 @@
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import React, { FC, useMemo, useEffect } from 'react';
import { cloneDeep } from 'lodash';
import { TRAINED_MODEL_TYPE, SUPPORTED_PYTORCH_TASKS } from '@kbn/ml-trained-models-utils';
import { NerInference } from './models/ner';
@ -22,52 +23,182 @@ import {
import { TextEmbeddingInference } from './models/text_embedding';
import { useMlApiContext } from '../../contexts/kibana';
import { type TestTrainedModelsContextType } from './test_trained_models_context';
import { InferenceInputForm } from './models/inference_input_form';
import { InferrerType } from './models';
import { INPUT_TYPE } from './models/inference_base';
import { TextExpansionInference } from './models/text_expansion';
import { type InferecePipelineCreationState } from '../create_pipeline_for_model/state';
import {
getInferencePropertiesFromPipelineConfig,
isMlIngestInferenceProcessor,
isMlInferencePipelineInferenceConfig,
} from '../create_pipeline_for_model/get_inference_properties_from_pipeline_config';
interface Props {
model: estypes.MlTrainedModelConfig;
inputType: INPUT_TYPE;
deploymentId: string;
handlePipelineConfigUpdate?: (configUpdate: Partial<InferecePipelineCreationState>) => void;
externalPipelineConfig?: estypes.IngestPipeline;
setCurrentContext?: React.Dispatch<TestTrainedModelsContextType>;
}
export const SelectedModel: FC<Props> = ({ model, inputType, deploymentId }) => {
export const SelectedModel: FC<Props> = ({
model,
inputType,
deploymentId,
handlePipelineConfigUpdate,
externalPipelineConfig,
setCurrentContext,
}) => {
const { trainedModels } = useMlApiContext();
const inferrer = useMemo<InferrerType | undefined>(() => {
if (model.model_type === TRAINED_MODEL_TYPE.PYTORCH) {
const taskType = Object.keys(model.inference_config ?? {})[0];
const taskType = Object.keys(model.inference_config ?? {})[0];
let tempInferrer: InferrerType | undefined;
const pipelineConfigValues = externalPipelineConfig
? getInferencePropertiesFromPipelineConfig(taskType, externalPipelineConfig)
: null;
if (model.model_type === TRAINED_MODEL_TYPE.PYTORCH) {
switch (taskType) {
case SUPPORTED_PYTORCH_TASKS.NER:
return new NerInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new NerInference(trainedModels, model, inputType, deploymentId);
break;
case SUPPORTED_PYTORCH_TASKS.TEXT_CLASSIFICATION:
return new TextClassificationInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new TextClassificationInference(
trainedModels,
model,
inputType,
deploymentId
);
break;
case SUPPORTED_PYTORCH_TASKS.ZERO_SHOT_CLASSIFICATION:
return new ZeroShotClassificationInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new ZeroShotClassificationInference(
trainedModels,
model,
inputType,
deploymentId
);
if (pipelineConfigValues) {
const { labels, multi_label: multiLabel } = pipelineConfigValues;
if (labels && multiLabel !== undefined) {
tempInferrer.setLabelsText(Array.isArray(labels) ? labels.join(',') : labels);
tempInferrer.setMultiLabel(Boolean(multiLabel));
}
}
break;
case SUPPORTED_PYTORCH_TASKS.TEXT_EMBEDDING:
return new TextEmbeddingInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new TextEmbeddingInference(trainedModels, model, inputType, deploymentId);
break;
case SUPPORTED_PYTORCH_TASKS.FILL_MASK:
return new FillMaskInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new FillMaskInference(trainedModels, model, inputType, deploymentId);
break;
case SUPPORTED_PYTORCH_TASKS.QUESTION_ANSWERING:
return new QuestionAnsweringInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new QuestionAnsweringInference(
trainedModels,
model,
inputType,
deploymentId
);
if (pipelineConfigValues?.question) {
tempInferrer.setQuestionText(pipelineConfigValues.question);
}
break;
case SUPPORTED_PYTORCH_TASKS.TEXT_EXPANSION:
return new TextExpansionInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new TextExpansionInference(trainedModels, model, inputType, deploymentId);
break;
default:
break;
}
} else if (model.model_type === TRAINED_MODEL_TYPE.LANG_IDENT) {
return new LangIdentInference(trainedModels, model, inputType, deploymentId);
tempInferrer = new LangIdentInference(trainedModels, model, inputType, deploymentId);
}
}, [inputType, model, trainedModels, deploymentId]);
if (tempInferrer) {
if (pipelineConfigValues) {
tempInferrer.setInputField(pipelineConfigValues.inputField);
}
if (externalPipelineConfig === undefined) {
tempInferrer.setSwitchtoCreationMode(() => {
if (tempInferrer && setCurrentContext) {
setCurrentContext({
pipelineConfig: tempInferrer.getPipeline(),
defaultSelectedDataViewId: tempInferrer.getSelectedDataViewId(),
createPipelineFlyoutOpen: true,
});
}
});
} else {
tempInferrer?.getPipeline$().subscribe((testPipeline) => {
if (handlePipelineConfigUpdate && testPipeline && externalPipelineConfig) {
const {
fieldMap: testFieldMap,
inferenceConfig: testInferenceConfig,
labels,
multi_label: multiLabel,
question,
} = getInferencePropertiesFromPipelineConfig(taskType, testPipeline);
const updatedPipeline = cloneDeep(externalPipelineConfig);
const { inferenceObj: externalInference, inferenceConfig: externalInferenceConfig } =
getInferencePropertiesFromPipelineConfig(taskType, updatedPipeline);
if (externalInference) {
// Always update target field change
externalInference.field_map = testFieldMap;
if (externalInferenceConfig === undefined) {
externalInference.inference_config = testInferenceConfig;
} else if (
isMlIngestInferenceProcessor(externalInference) &&
isMlInferencePipelineInferenceConfig(externalInference.inference_config)
) {
// Only update the properties that change in the test step to avoid overwriting user edits
if (
taskType === SUPPORTED_PYTORCH_TASKS.ZERO_SHOT_CLASSIFICATION &&
labels &&
multiLabel !== undefined
) {
const external =
externalInference.inference_config[
SUPPORTED_PYTORCH_TASKS.ZERO_SHOT_CLASSIFICATION
];
if (external) {
external.multi_label = multiLabel;
external.labels = labels;
}
} else if (
taskType === SUPPORTED_PYTORCH_TASKS.QUESTION_ANSWERING &&
question !== undefined
) {
const external =
externalInference.inference_config[SUPPORTED_PYTORCH_TASKS.QUESTION_ANSWERING];
if (external) {
external.question = question;
}
}
}
}
handlePipelineConfigUpdate({
initialPipelineConfig: updatedPipeline,
});
}
});
}
}
return tempInferrer;
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [inputType, model, trainedModels, deploymentId, setCurrentContext]);
useEffect(() => {
return () => {
inferrer?.destroy();
};
}, [inferrer]);
}, [inferrer, model.model_id]);
if (inferrer !== undefined) {
return <InferenceInputForm inferrer={inferrer} inputType={inputType} />;

View file

@ -5,126 +5,36 @@
* 2.0.
*/
import React, { FC, useState, useMemo } from 'react';
import React, { FC } from 'react';
import { FormattedMessage } from '@kbn/i18n-react';
import {
EuiFlyout,
EuiFlyoutBody,
EuiFlyoutHeader,
EuiFormRow,
EuiSelect,
EuiSpacer,
EuiTab,
EuiTabs,
EuiTitle,
useEuiPaddingSize,
} from '@elastic/eui';
import { EuiFlyout, EuiFlyoutBody, EuiFlyoutHeader, EuiSpacer, EuiTitle } from '@elastic/eui';
import { SUPPORTED_PYTORCH_TASKS } from '@kbn/ml-trained-models-utils';
import { SelectedModel } from './selected_model';
import { INPUT_TYPE } from './models/inference_base';
import { type ModelItem } from '../models_list';
import { TestTrainedModelContent } from './test_trained_model_content';
interface Props {
model: ModelItem;
onClose: () => void;
}
export const TestTrainedModelFlyout: FC<Props> = ({ model, onClose }) => {
const [deploymentId, setDeploymentId] = useState<string>(model.deployment_ids[0]);
const mediumPadding = useEuiPaddingSize('m');
const [inputType, setInputType] = useState<INPUT_TYPE>(INPUT_TYPE.TEXT);
const onlyShowTab: INPUT_TYPE | undefined = useMemo(() => {
return (model.type ?? []).includes(SUPPORTED_PYTORCH_TASKS.TEXT_EXPANSION)
? INPUT_TYPE.INDEX
: undefined;
}, [model]);
return (
<>
<EuiFlyout maxWidth={600} onClose={onClose} data-test-subj="mlTestModelsFlyout">
<EuiFlyoutHeader hasBorder>
<EuiTitle size="m">
<h2>
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.headerLabel"
defaultMessage="Test trained model"
/>
</h2>
</EuiTitle>
<EuiSpacer size="s" />
<EuiTitle size="xs">
<h4>{model.model_id}</h4>
</EuiTitle>
</EuiFlyoutHeader>
<EuiFlyoutBody>
{model.deployment_ids.length > 1 ? (
<>
<EuiFormRow
fullWidth
label={
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.deploymentIdLabel"
defaultMessage="Deployment ID"
/>
}
>
<EuiSelect
fullWidth
options={model.deployment_ids.map((v) => {
return { text: v, value: v };
})}
value={deploymentId}
onChange={(e) => {
setDeploymentId(e.target.value);
}}
/>
</EuiFormRow>
<EuiSpacer size="l" />
</>
) : null}
{onlyShowTab === undefined ? (
<>
<EuiTabs
size="m"
css={{
marginTop: `-${mediumPadding}`,
}}
>
<EuiTab
isSelected={inputType === INPUT_TYPE.TEXT}
onClick={() => setInputType(INPUT_TYPE.TEXT)}
>
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.textTab"
defaultMessage="Test using text"
/>
</EuiTab>
<EuiTab
isSelected={inputType === INPUT_TYPE.INDEX}
onClick={() => setInputType(INPUT_TYPE.INDEX)}
>
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.indexTab"
defaultMessage="Test using existing index"
/>
</EuiTab>
</EuiTabs>
<EuiSpacer size="m" />
</>
) : null}
<SelectedModel
model={model}
inputType={onlyShowTab ?? inputType}
deploymentId={deploymentId ?? model.model_id}
export const TestTrainedModelFlyout: FC<Props> = ({ model, onClose }) => (
<EuiFlyout maxWidth={600} onClose={onClose} data-test-subj="mlTestModelsFlyout">
<EuiFlyoutHeader hasBorder>
<EuiTitle size="m">
<h2>
<FormattedMessage
id="xpack.ml.trainedModels.testModelsFlyout.headerLabel"
defaultMessage="Test trained model"
/>
</EuiFlyoutBody>
</EuiFlyout>
</>
);
};
</h2>
</EuiTitle>
<EuiSpacer size="s" />
<EuiTitle size="xs">
<h4>{model.model_id}</h4>
</EuiTitle>
</EuiFlyoutHeader>
<EuiFlyoutBody>
<TestTrainedModelContent model={model} />
</EuiFlyoutBody>
</EuiFlyout>
);

View file

@ -0,0 +1,37 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC, useState } from 'react';
import {
type TestTrainedModelsContextType,
TestTrainedModelsContext,
} from './test_trained_models_context';
import type { ModelItem } from '../models_list';
import { TestTrainedModelFlyout } from './test_flyout';
import { CreatePipelineForModelFlyout } from '../create_pipeline_for_model/create_pipeline_for_model_flyout';
interface Props {
  // Trained model for which the test / create-pipeline flyouts are shown.
  model: ModelItem;
  // Invoked when either flyout closes; refreshList presumably requests a reload
  // of the models list — confirm with the caller.
  onClose: (refreshList?: boolean) => void;
}
/**
 * Hosts the "Test trained model" flyout and the "Create pipeline" flyout under
 * a shared context so the test step can hand its configuration over to the
 * pipeline-creation step. Exactly one of the two flyouts is rendered at a time.
 */
export const TestModelAndPipelineCreationFlyout: FC<Props> = ({ model, onClose }) => {
  // Shared state: which flyout is visible plus the pipeline config produced by testing.
  const [currentContext, setCurrentContext] = useState<TestTrainedModelsContextType>({
    pipelineConfig: undefined,
    createPipelineFlyoutOpen: false,
  });

  return (
    <TestTrainedModelsContext.Provider value={{ currentContext, setCurrentContext }}>
      {currentContext.createPipelineFlyoutOpen ? (
        <CreatePipelineForModelFlyout model={model} onClose={onClose} />
      ) : (
        <TestTrainedModelFlyout model={model} onClose={onClose} />
      )}
    </TestTrainedModelsContext.Provider>
  );
};

View file

@ -0,0 +1,115 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import React, { FC, useState, useMemo } from 'react';
import { SUPPORTED_PYTORCH_TASKS } from '@kbn/ml-trained-models-utils';
import { FormattedMessage } from '@kbn/i18n-react';
import { EuiFormRow, EuiSelect, EuiSpacer, EuiTab, EuiTabs, useEuiPaddingSize } from '@elastic/eui';
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
import { SelectedModel } from './selected_model';
import { type ModelItem } from '../models_list';
import { INPUT_TYPE } from './models/inference_base';
import { useTestTrainedModelsContext } from './test_trained_models_context';
import { type InferecePipelineCreationState } from '../create_pipeline_for_model/state';
interface ContentProps {
  // Trained model being tested.
  model: ModelItem;
  // Optional callback for pushing pipeline config updates up to the creation flow.
  handlePipelineConfigUpdate?: (configUpdate: Partial<InferecePipelineCreationState>) => void;
  // Existing pipeline config, provided when this content is embedded in the
  // create-pipeline flyout rather than the plain test flyout.
  externalPipelineConfig?: estypes.IngestPipeline;
}
/**
 * Body of the "Test trained model" flyout: lets the user pick a deployment
 * (when the model has more than one), switch between text- and index-based
 * input, and renders the inference form for the selected model.
 *
 * Fix: removed a stray `{' '}` JSX text literal that rendered an accidental
 * leading space text node before the deployment selector.
 */
export const TestTrainedModelContent: FC<ContentProps> = ({
  model,
  handlePipelineConfigUpdate,
  externalPipelineConfig,
}) => {
  // NOTE(review): deployment_ids[0] may be undefined at runtime (the
  // `deploymentId ?? model.model_id` fallback below suggests so) — confirm.
  const [deploymentId, setDeploymentId] = useState<string>(model.deployment_ids[0]);
  const mediumPadding = useEuiPaddingSize('m');

  const [inputType, setInputType] = useState<INPUT_TYPE>(INPUT_TYPE.TEXT);
  const {
    currentContext: { createPipelineFlyoutOpen },
    setCurrentContext,
  } = useTestTrainedModelsContext();

  // Text expansion models and the pipeline-creation flow support only
  // index-based input: force the index tab and hide the tab bar in those cases.
  const onlyShowTab: INPUT_TYPE | undefined = useMemo(() => {
    return (model.type ?? []).includes(SUPPORTED_PYTORCH_TASKS.TEXT_EXPANSION) ||
      createPipelineFlyoutOpen
      ? INPUT_TYPE.INDEX
      : undefined;
  }, [model, createPipelineFlyoutOpen]);

  return (
    <>
      {model.deployment_ids.length > 1 ? (
        <>
          <EuiFormRow
            fullWidth
            label={
              <FormattedMessage
                id="xpack.ml.trainedModels.testModelsFlyout.deploymentIdLabel"
                defaultMessage="Deployment ID"
              />
            }
          >
            <EuiSelect
              fullWidth
              options={model.deployment_ids.map((v) => {
                return { text: v, value: v };
              })}
              value={deploymentId}
              onChange={(e) => {
                setDeploymentId(e.target.value);
              }}
            />
          </EuiFormRow>
          <EuiSpacer size="l" />
        </>
      ) : null}
      {onlyShowTab === undefined ? (
        <>
          <EuiTabs
            size="m"
            css={{
              // Pull the tabs up against the flyout header padding.
              marginTop: `-${mediumPadding}`,
            }}
          >
            <EuiTab
              isSelected={inputType === INPUT_TYPE.TEXT}
              onClick={() => setInputType(INPUT_TYPE.TEXT)}
            >
              <FormattedMessage
                id="xpack.ml.trainedModels.testModelsFlyout.textTab"
                defaultMessage="Test using text"
              />
            </EuiTab>
            <EuiTab
              isSelected={inputType === INPUT_TYPE.INDEX}
              onClick={() => setInputType(INPUT_TYPE.INDEX)}
            >
              <FormattedMessage
                id="xpack.ml.trainedModels.testModelsFlyout.indexTab"
                defaultMessage="Test using existing index"
              />
            </EuiTab>
          </EuiTabs>
          <EuiSpacer size="m" />
        </>
      ) : null}
      <SelectedModel
        model={model}
        inputType={onlyShowTab ?? inputType}
        deploymentId={deploymentId ?? model.model_id}
        handlePipelineConfigUpdate={handlePipelineConfigUpdate}
        externalPipelineConfig={externalPipelineConfig}
        setCurrentContext={setCurrentContext}
      />
    </>
  );
};

View file

@ -0,0 +1,32 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
import { createContext, Dispatch, useContext } from 'react';
import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
// State shared between the "Test trained model" and "Create pipeline" flyouts.
export interface TestTrainedModelsContextType {
  // Ingest pipeline config generated from the test configuration, if any.
  pipelineConfig?: estypes.IngestPipeline;
  // When true, the create-pipeline flyout is shown instead of the test flyout.
  createPipelineFlyoutOpen: boolean;
  // Data view id to preselect in the create-pipeline flow — presumably taken
  // from the index used while testing; confirm with the inferrer implementation.
  defaultSelectedDataViewId?: string;
}
// Context carrying the shared flyout state and its setter. The default value is
// undefined so a missing provider can be detected (and thrown on) by consumers.
export const TestTrainedModelsContext = createContext<
  | {
      currentContext: TestTrainedModelsContextType;
      setCurrentContext: Dispatch<TestTrainedModelsContextType>;
    }
  | undefined
>(undefined);
export function useTestTrainedModelsContext() {
const testTrainedModelsContext = useContext(TestTrainedModelsContext);
if (testTrainedModelsContext === undefined) {
throw new Error('TestTrainedModelsContext has not been initialized.');
}
return testTrainedModelsContext;
}

View file

@ -26820,7 +26820,6 @@
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.create.failureMessage": "Impossible de créer le pipeline \"{pipelineName}\".",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.create.successMessage": "Le pipeline \"{pipelineName}\" a été créé avec succès.",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.createDataViewLabel": "Créer une vue de données",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.description": "Ce pipeline est créé en respectant la configuration ci-dessous.",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destinationIndexLabel": "Nom de l'index de destination",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destIndexEmpty": "Entrer un nom d'index de destination valide",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destIndexExists": "Un index portant ce nom existe déjà.",

View file

@ -26820,7 +26820,6 @@
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.create.failureMessage": "'{pipelineName}'を作成できません。",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.create.successMessage": "'{pipelineName}'は正常に作成されました。",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.createDataViewLabel": "データビューを作成",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.description": "このパイプラインは以下の構成で作成されます。",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destinationIndexLabel": "デスティネーションインデックス名",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destIndexEmpty": "有効なデスティネーションインデックス名を入力",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destIndexExists": "この名前のインデックスがすでに存在します。",

View file

@ -26818,7 +26818,6 @@
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.create.failureMessage": "无法创建“{pipelineName}”。",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.create.successMessage": "已成功创建“{pipelineName}”。",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.createDataViewLabel": "创建数据视图",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.description": "将使用以下配置创建此管道。",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destinationIndexLabel": "目标索引名称",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destIndexEmpty": "输入有效的目标索引",
"xpack.ml.trainedModels.content.indices.pipelines.addInferencePipelineModal.steps.review.destIndexExists": "已存在具有此名称的索引。",