[8.15] [Index management] Refactor api_integration tests for create inference endpoint (#187521) (#188045)

# Backport

This will backport the following commits from `main` to `8.15`:
- [[Index management] Refactor api_integration tests for create
inference endpoint
(#187521)](https://github.com/elastic/kibana/pull/187521)

Tested the changes locally and against a cloud project using this PR's Docker
image

<!--- Backport version: 8.9.8 -->

### Questions?
Please refer to the [Backport tool
documentation](https://github.com/sqren/backport)

<!--BACKPORT [{"author":{"name":"Saarika
Bhasi","email":"55930906+saarikabhasi@users.noreply.github.com"},"sourceCommit":{"committedDate":"2024-07-10T15:18:53Z","message":"[Index
management] Refactor api_integration tests for create inference endpoint
(#187521)\n\n## Summary\r\n * delete underlying trained model during
`after all` clean up \r\n * handle request time out error when creating
inference endpoint\r\n\r\nTested against QA deployment and
locally.\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>","sha":"46b21546aac66ca509f4c1ea197c12eac5675f32","branchLabelMapping":{"^v8.16.0$":"main","^v(\\d+).(\\d+).\\d+$":"$1.$2"}},"sourcePullRequest":{"labels":["release_note:skip","v8.15.0","v8.16.0"],"number":187521,"url":"https://github.com/elastic/kibana/pull/187521","mergeCommit":{"message":"[Index
management] Refactor api_integration tests for create inference endpoint
(#187521)\n\n## Summary\r\n * delete underlying trained model during
`after all` clean up \r\n * handle request time out error when creating
inference endpoint\r\n\r\nTested against QA deployment and
locally.\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>","sha":"46b21546aac66ca509f4c1ea197c12eac5675f32"}},"sourceBranch":"main","suggestedTargetBranches":["8.15"],"targetPullRequestStates":[{"branch":"8.15","label":"v8.15.0","labelRegex":"^v(\\d+).(\\d+).\\d+$","isSourceBranch":false,"state":"NOT_CREATED"},{"branch":"main","label":"v8.16.0","labelRegex":"^v8.16.0$","isSourceBranch":true,"state":"MERGED","url":"https://github.com/elastic/kibana/pull/187521","number":187521,"mergeCommit":{"message":"[Index
management] Refactor api_integration tests for create inference endpoint
(#187521)\n\n## Summary\r\n * delete underlying trained model during
`after all` clean up \r\n * handle request time out error when creating
inference endpoint\r\n\r\nTested against QA deployment and
locally.\r\n\r\n---------\r\n\r\nCo-authored-by: kibanamachine
<42973632+kibanamachine@users.noreply.github.com>","sha":"46b21546aac66ca509f4c1ea197c12eac5675f32"}}]}]
BACKPORT-->
This commit is contained in:
Saarika Bhasi 2024-07-11 11:44:46 -04:00 committed by GitHub
parent 4874e64253
commit 2dc9f66822
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 82 additions and 70 deletions

View file

@ -6,6 +6,7 @@
*/
import expect from '@kbn/expect';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { FtrProviderContext } from '../../../ftr_provider_context';
const API_BASE_PATH = '/api/index_management';
@ -17,46 +18,48 @@ export default function ({ getService }: FtrProviderContext) {
const inferenceId = 'my-elser-model';
const taskType = 'sparse_embedding';
const service = 'elser';
const modelId = '.elser_model_2';
describe('Inference endpoints', function () {
before(async () => {
log.debug(`Creating inference endpoint`);
try {
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
},
});
} catch (err) {
log.debug('[Setup error] Error creating inference endpoint');
throw err;
}
});
after(async () => {
// Cleanup inference endpoints created for testing purposes
try {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug(`Deleting underlying trained model`);
await ml.api.deleteTrainedModelES(modelId);
await ml.testResources.cleanMLSavedObjects();
} catch (err) {
log.debug('[Cleanup error] Error deleting inference endpoint');
log.debug('[Cleanup error] Error deleting trained model or saved ml objects');
throw err;
}
});
describe('get inference endpoints', () => {
it('returns the existing inference endpoints', async () => {
const { body: inferenceEndpoints } = await supertest
.get(`${API_BASE_PATH}/inference/all`)
.set('kbn-xsrf', 'xxx')
.set('x-elastic-internal-origin', 'xxx')
.expect(200);
expect(inferenceEndpoints).to.be.ok();
expect(inferenceEndpoints[0].model_id).to.eql(inferenceId);
it('create inference endpoint', async () => {
log.debug(`create inference endpoint`);
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: modelId,
},
});
});
it('get all inference endpoints and confirm inference endpoint exist', async () => {
const { body: inferenceEndpoints } = await supertest
.get(`${API_BASE_PATH}/inference/all`)
.set('kbn-xsrf', 'xxx')
.set('x-elastic-internal-origin', 'xxx')
.expect(200);
expect(inferenceEndpoints).to.be.ok();
expect(
inferenceEndpoints.some(
(endpoint: InferenceAPIConfigResponse) => endpoint.model_id === inferenceId
)
).to.be(true);
});
it('can delete inference endpoint', async () => {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug('> Inference endpoint deleted');
});
});
}

View file

@ -247,18 +247,22 @@ export function MachineLearningAPIProvider({ getService }: FtrProviderContext) {
log.debug(`Inference endpoint '${inferenceId}' already exists. Nothing to create.`);
return;
}
const { body, status } = await esSupertest
.put(`/_inference/${taskType}/${inferenceId}`)
const response = await kbnSupertest
.put(`/internal/ml/_inference/${taskType}/${inferenceId}`)
.set(getCommonRequestHeader('1'))
.send(requestBody);
this.assertResponseStatusCode(200, status, body);
return body;
this.assertResponseStatusCode(200, response.status, response.body);
log.debug('> Inference endpoint created');
return response;
},
async deleteInferenceEndpoint(inferenceId: string, taskType: string) {
const { body, status } = await esSupertest.delete(`/_inference/${taskType}/${inferenceId}`);
this.assertResponseStatusCode(200, status, body);
expect(body)
.to.have.property('acknowledged')
.eql(true, 'Response for delete inference endpoint should be acknowledged');
return body;
},

View file

@ -172,6 +172,7 @@
"@kbn/alerting-comparators",
"@kbn/alerting-state-types",
"@kbn/reporting-server",
"@kbn/data-quality-plugin"
"@kbn/data-quality-plugin",
"@kbn/ml-trained-models-utils"
]
}

View file

@ -6,6 +6,7 @@
*/
import expect from '@kbn/expect';
import { InferenceAPIConfigResponse } from '@kbn/ml-trained-models-utils';
import { FtrProviderContext } from '../../../ftr_provider_context';
const API_BASE_PATH = '/api/index_management';
@ -18,48 +19,50 @@ export default function ({ getService }: FtrProviderContext) {
const taskType = 'sparse_embedding';
const service = 'elser';
const modelId = '.elser_model_2';
// FLAKY: https://github.com/elastic/kibana/issues/185216
describe.skip('Inference endpoints', function () {
// test adds new trained model '.elser_model_2_linux-x86_64', but does not clean it. Follow up tests are affected
this.tags(['failsOnMKI']);
before(async () => {
log.debug(`Creating inference endpoint`);
try {
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
},
});
} catch (err) {
log.debug('[Setup error] Error creating inference endpoint');
throw err;
}
});
describe('Inference endpoints', function () {
after(async () => {
// Cleanup inference endpoints created for testing purposes
try {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug(`Deleting underlying trained model`);
await ml.api.deleteTrainedModelES(modelId);
await ml.testResources.cleanMLSavedObjects();
} catch (err) {
log.debug('[Cleanup error] Error deleting inference endpoint');
log.debug('[Cleanup error] Error deleting trained model and saved ml objects');
throw err;
}
});
describe('get inference endpoints', () => {
it('returns the existing inference endpoints', async () => {
const { body: inferenceEndpoints } = await supertest
.get(`${API_BASE_PATH}/inference/all`)
.set('kbn-xsrf', 'xxx')
.set('x-elastic-internal-origin', 'xxx')
.expect(200);
expect(inferenceEndpoints).to.be.ok();
expect(inferenceEndpoints[0].model_id).to.eql(inferenceId);
it('create inference endpoint', async () => {
log.debug(`create inference endpoint`);
await ml.api.createInferenceEndpoint(inferenceId, taskType, {
service,
service_settings: {
num_allocations: 1,
num_threads: 1,
model_id: modelId,
},
});
});
it('get all inference endpoints and confirm inference endpoint exist', async () => {
const { body: inferenceEndpoints } = await supertest
.get(`${API_BASE_PATH}/inference/all`)
.set('kbn-xsrf', 'xxx')
.set('x-elastic-internal-origin', 'xxx')
.expect(200);
expect(inferenceEndpoints).to.be.ok();
expect(
inferenceEndpoints.some(
(endpoint: InferenceAPIConfigResponse) => endpoint.model_id === inferenceId
)
).to.be(true);
});
it('can delete inference endpoint', async () => {
log.debug(`Deleting inference endpoint`);
await ml.api.deleteInferenceEndpoint(inferenceId, taskType);
log.debug('> Inference endpoint deleted');
});
});
}

View file

@ -105,5 +105,6 @@
"@kbn/config-schema",
"@kbn/features-plugin",
"@kbn/observability-ai-assistant-plugin",
"@kbn/ml-trained-models-utils",
]
}