Upgrade elasticsearch-js to 8.13.0 (#179747)

Alejandro Fernández Haro 2024-04-03 17:56:29 +02:00 committed by GitHub
parent 982303882e
commit 947dac2846
GPG key ID: B5690EEEBB952194
32 changed files with 22 additions and 75 deletions


@ -104,7 +104,7 @@
"@elastic/charts": "64.0.2",
"@elastic/datemath": "5.0.3",
"@elastic/ecs": "^8.11.1",
"@elastic/elasticsearch": "^8.12.2",
"@elastic/elasticsearch": "^8.13.0",
"@elastic/ems-client": "8.5.1",
"@elastic/eui": "93.5.2",
"@elastic/filesaver": "1.1.2",


@ -163,9 +163,9 @@ export const incrementCounterInternal = async <T>(
...(savedObjectNamespaces && { namespaces: savedObjectNamespaces }),
...(originId && { originId }),
updated_at: time,
- references: body.get?._source.references ?? [],
+ references: body.get?._source?.references ?? [],
  version: encodeHitVersion(body),
- attributes: body.get?._source[type],
+ attributes: body.get?._source?.[type],
...(managed && { managed }),
};
};
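The same pattern in the saved-objects counter: the updated document is read back from an `update` response, and every step of that chain is now typed as possibly undefined, so defaults belong at the read site. A rough sketch with a hypothetical attributes shape:

```ts
import type { UpdateResponse } from '@elastic/elasticsearch/lib/api/types';

interface CounterSource {
  references?: Array<{ type: string; id: string; name: string }>;
  [savedObjectType: string]: unknown;
}

// `body.get` and `body.get._source` are both optional in the 8.13 typings,
// so fall back to sane defaults instead of assuming the source is present.
function readCounterSource(body: UpdateResponse<CounterSource>, type: string) {
  return {
    references: body.get?._source?.references ?? [],
    attributes: body.get?._source?.[type],
  };
}
```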


@ -123,7 +123,7 @@ export async function internalBulkResolve<T>(
});
const aliasDoc = aliasDocs[i];
if (aliasDoc?.found) {
- const legacyUrlAlias: LegacyUrlAlias = aliasDoc._source[LEGACY_URL_ALIAS_TYPE];
+ const legacyUrlAlias: LegacyUrlAlias = aliasDoc._source![LEGACY_URL_ALIAS_TYPE];
if (!legacyUrlAlias.disabled) {
docsToBulkGet.push({
// also attempt to find a match for the legacy URL alias target ID
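Where `found` has already been checked, the new optionality is handled with a non-null assertion instead of chaining, since the source is known to exist at that point. A self-contained sketch of the same guard over an mget response (the document shape is hypothetical):

```ts
import type { MgetResponse } from '@elastic/elasticsearch/lib/api/types';

interface LegacyUrlAliasDoc {
  disabled?: boolean;
  targetId?: string;
}

function collectEnabledAliases(response: MgetResponse<LegacyUrlAliasDoc>): LegacyUrlAliasDoc[] {
  const enabled: LegacyUrlAliasDoc[] = [];
  for (const doc of response.docs) {
    // Each item is either an error container or a hit; `found` narrows it to a hit.
    if ('found' in doc && doc.found) {
      // `_source` is optional in the 8.13 typings, but a found hit with a requested
      // source carries it, so the non-null assertion is safe here.
      const alias = doc._source!;
      if (!alias.disabled) {
        enabled.push(alias);
      }
    }
  }
  return enabled;
}
```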


@ -86,7 +86,6 @@ export async function callFieldCapsApi(params: FieldCapsApiParams) {
index_filter: indexFilter,
expand_wildcards: expandWildcards,
types: fieldTypes,
// @ts-expect-error
include_empty_fields: includeEmptyFields ?? true,
...fieldCapsOptions,
},
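`include_empty_fields` landed in `FieldCapsRequest` with the 8.13 definitions, so the suppression above can simply be dropped. A hedged sketch of the call shape (index pattern and abort handling are hypothetical):

```ts
import { Client } from '@elastic/elasticsearch';

const client = new Client({ node: 'http://localhost:9200' }); // hypothetical node

async function fetchFieldCaps(abortSignal?: AbortSignal) {
  return client.fieldCaps(
    {
      index: 'logs-*',             // hypothetical index pattern
      fields: '*',
      include_empty_fields: false, // typed in 8.13, no @ts-expect-error required
    },
    { signal: abortSignal, maxRetries: 0 }
  );
}
```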


@ -47,7 +47,6 @@ export const fetchIndexInfo = async (
{
index,
fields: '*',
// @ts-expect-error include_empty_fields missing from FieldCapsRequest
include_empty_fields: false,
},
{ signal: abortSignal, maxRetries: 0 }


@ -79,7 +79,6 @@ describe('getMlModelTypesForModelConfig lib function', () => {
});
describe('generateMlInferencePipelineBody lib function', () => {
// @ts-expect-error pipeline._meta defined as mandatory
const expected: MlInferencePipeline = {
description: 'my-description',
processors: [
@ -202,7 +201,6 @@ describe('generateMlInferencePipelineBody lib function', () => {
describe('parseMlInferenceParametersFromPipeline', () => {
it('returns pipeline parameters from ingest pipeline', () => {
expect(
// @ts-expect-error pipeline._meta defined as mandatory
parseMlInferenceParametersFromPipeline('unit-test', {
processors: [
{
@ -230,7 +228,6 @@ describe('parseMlInferenceParametersFromPipeline', () => {
});
it('returns pipeline parameters from ingest pipeline with multiple inference processors', () => {
expect(
// @ts-expect-error pipeline._meta defined as mandatory
parseMlInferenceParametersFromPipeline('unit-test', {
processors: [
{
@ -270,12 +267,10 @@ describe('parseMlInferenceParametersFromPipeline', () => {
});
});
it('return null if pipeline is missing inference processor', () => {
// @ts-expect-error pipeline._meta defined as mandatory
expect(parseMlInferenceParametersFromPipeline('unit-test', { processors: [] })).toBeNull();
});
it('return null if pipeline is missing field_map', () => {
expect(
// @ts-expect-error pipeline._meta defined as mandatory
parseMlInferenceParametersFromPipeline('unit-test', {
processors: [
{


@ -56,7 +56,6 @@ export const generateMlInferencePipelineBody = ({
model,
pipelineName,
}: MlInferencePipelineParams): MlInferencePipeline => {
// @ts-expect-error pipeline._meta defined as mandatory
const pipelineDefinition: MlInferencePipeline = {
description: description ?? '',
processors: [],
@ -186,7 +185,6 @@ export const parseMlInferenceParametersFromPipeline = (
})
.filter((f) => f.sourceField) as FieldMapping[];
// @ts-expect-error pipeline._meta defined as mandatory
return fieldMappings.length === 0
? null
: {
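Most of the remaining hunks are this one change repeated: `_meta` is optional on `IngestPipeline` in the 8.13 definitions, so pipeline literals type-check without the suppression comment. A minimal sketch:

```ts
import type * as estypes from '@elastic/elasticsearch/lib/api/types';

// `_meta` can now be omitted (or supplied) freely; the 8.13 type no longer requires it.
const pipeline: estypes.IngestPipeline = {
  description: 'my-description', // hypothetical values throughout
  processors: [
    {
      set: { field: 'event.kind', value: 'enrichment' },
    },
  ],
  version: 1,
};
```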


@ -34,7 +34,6 @@ describe('CreateMlInferencePipelineApiLogic', () => {
indexName: 'my-index',
modelId: 'my-model-id',
pipelineName: 'my-pipeline',
// @ts-expect-error pipeline._meta defined as mandatory
pipelineDefinition: { processors: [], version: 1 },
};
const result = await createMlInferencePipeline(args);


@ -199,7 +199,6 @@ describe('MlInferenceLogic', () => {
],
indexName: 'test',
modelId: 'test-model',
// @ts-expect-error pipeline._meta defined as mandatory
pipelineDefinition: {},
pipelineName: 'unit-test',
});
@ -260,7 +259,6 @@ describe('MlInferenceLogic', () => {
],
});
MLInferenceLogic.actions.fetchPipelineSuccess({
// @ts-expect-error pipeline._meta defined as mandatory
'mock-pipeline': {},
});
@ -332,7 +330,6 @@ describe('MlInferenceLogic', () => {
version: 1,
};
FetchMlInferencePipelinesApiLogic.actions.apiSuccess({
// @ts-expect-error pipeline._meta defined as mandatory
'unit-test': existingPipeline,
});
MLInferenceLogic.actions.setInferencePipelineConfiguration({
@ -487,7 +484,6 @@ describe('MlInferenceLogic', () => {
jest.spyOn(MLInferenceLogic.actions, 'setAddInferencePipelineStep');
MLInferenceLogic.actions.fetchPipelineSuccess({
// @ts-expect-error pipeline._meta defined as mandatory
'mock-pipeline': {},
});
expect(MLInferenceLogic.actions.setAddInferencePipelineStep).toHaveBeenCalledWith(


@ -60,7 +60,6 @@ const DEFAULT_MODELS: MlModel[] = [
];
const DEFAULT_PIPELINES: FetchMlInferencePipelinesResponse = {
// @ts-expect-error pipeline._meta defined as mandatory
'my-pipeline': {
processors: [
{
@ -131,7 +130,6 @@ describe('PipelineSelectLogic', () => {
jest.spyOn(PipelineSelectLogic.actions, 'setInferencePipelineConfiguration');
FetchMlInferencePipelinesApiLogic.actions.apiSuccess({
// @ts-expect-error pipeline._meta defined as mandatory
'my-pipeline': {
processors: [
{
@ -186,7 +184,6 @@ describe('PipelineSelectLogic', () => {
});
it('returns disabled pipeline option if missing source fields', () => {
FetchMlInferencePipelinesApiLogic.actions.apiSuccess({
// @ts-expect-error pipeline._meta defined as mandatory
'my-pipeline': {
processors: [
{
@ -235,7 +232,6 @@ describe('PipelineSelectLogic', () => {
});
it('returns enabled pipeline option if model is redacted', () => {
FetchMlInferencePipelinesApiLogic.actions.apiSuccess({
// @ts-expect-error pipeline._meta defined as mandatory
'my-pipeline': {
processors: [
{


@ -46,7 +46,6 @@ const DEFAULT_VALUES: TestPipelineValues = {
simulatePipelineStatus: 0,
};
// @ts-expect-error pipeline._meta defined as mandatory
const mockInferencePipeline: MlInferencePipeline = {
processors: [],
version: 1,


@ -61,7 +61,6 @@ describe('IndexPipelinesConfigurationsLogic', () => {
version: 1,
},
};
// @ts-expect-error pipeline._meta defined as mandatory
FetchCustomPipelineApiLogic.actions.apiSuccess(pipelines);
await nextTick();
@ -73,7 +72,6 @@ describe('IndexPipelinesConfigurationsLogic', () => {
version: 1,
},
};
// @ts-expect-error pipeline._meta defined as mandatory
FetchCustomPipelineApiLogic.actions.apiSuccess(pipelines);
await nextTick();
@ -94,7 +92,6 @@ describe('IndexPipelinesConfigurationsLogic', () => {
version: 1,
},
};
// @ts-expect-error pipeline._meta defined as mandatory
FetchCustomPipelineApiLogic.actions.apiSuccess(pipelines);
await nextTick();
@ -115,7 +112,6 @@ describe('IndexPipelinesConfigurationsLogic', () => {
version: 3,
},
};
// @ts-expect-error pipeline._meta defined as mandatory
FetchCustomPipelineApiLogic.actions.apiSuccess(pipelines);
IndexPipelinesConfigurationsLogic.actions.selectPipeline('foo');
await nextTick();


@ -153,7 +153,6 @@ describe('PipelinesLogic', () => {
PipelinesLogic.actions.savePipeline = jest.fn();
PipelinesLogic.actions.fetchCustomPipeline = jest.fn();
PipelinesLogic.actions.fetchIndexApiSuccess(connectorIndex);
// @ts-expect-error pipeline._meta defined as mandatory
PipelinesLogic.actions.createCustomPipelineSuccess({ [connectorIndex.name]: {} });
expect(flashSuccessToast).toHaveBeenCalledWith('Custom pipeline created');
expect(PipelinesLogic.actions.setPipelineState).toHaveBeenCalledWith({
@ -217,7 +216,6 @@ describe('PipelinesLogic', () => {
...apiIndex,
});
const indexName = apiIndex.name;
// @ts-expect-error pipeline._meta defined as mandatory
const indexPipelines: Record<string, IngestPipeline> = {
[indexName]: {
processors: [],


@ -56,7 +56,6 @@ describe('createMlInferencePipeline lib function', () => {
const actualResult = await createMlInferencePipeline(
pipelineName,
// @ts-expect-error pipeline._meta defined as mandatory
pipelineDefinition,
mockClient as unknown as ElasticsearchClient
);
@ -68,7 +67,6 @@ describe('createMlInferencePipeline lib function', () => {
it('should convert spaces to underscores in the pipeline name', async () => {
await createMlInferencePipeline(
'my pipeline with spaces ',
// @ts-expect-error pipeline._meta defined as mandatory
pipelineDefinition,
mockClient as unknown as ElasticsearchClient
);
@ -89,7 +87,6 @@ describe('createMlInferencePipeline lib function', () => {
const actualResult = createMlInferencePipeline(
pipelineName,
// @ts-expect-error pipeline._meta defined as mandatory
pipelineDefinition,
mockClient as unknown as ElasticsearchClient
);


@ -337,7 +337,6 @@ describe('getMlInferencePipelineProcessorNamesFromPipelines', () => {
const expected = ['ml-inference-pipeline-1'];
const processorNames = getMlInferencePipelineProcessorNamesFromPipelines(
'my-index',
// @ts-expect-error pipeline._meta defined as mandatory
mockGetPipeline
);
expect(processorNames).toEqual(expected);
@ -345,7 +344,6 @@ describe('getMlInferencePipelineProcessorNamesFromPipelines', () => {
it('should return an empty array for a missing @ml-inference pipeline', () => {
const processorNames = getMlInferencePipelineProcessorNamesFromPipelines(
'my-index-without-ml-inference-pipeline',
// @ts-expect-error pipeline._meta defined as mandatory
mockGetPipeline
);
@ -355,7 +353,6 @@ describe('getMlInferencePipelineProcessorNamesFromPipelines', () => {
const processorNames = getMlInferencePipelineProcessorNamesFromPipelines(
'my-index-without-ml-inference-pipeline',
{
// @ts-expect-error pipeline._meta defined as mandatory
'my-index-without-ml-inference-pipeline': {},
}
);


@ -36,7 +36,6 @@ export const createIndexPipelineDefinitions = async (
version: 1,
};
await esClient.ingest.putPipeline(mlPipeline);
// @ts-expect-error pipeline._meta defined as mandatory
result = { ...result, [mlPipeline.id]: mlPipeline };
const customPipeline = {
description: `Enterprise Search customizable ingest pipeline for the '${indexName}' index`,
@ -45,7 +44,6 @@ export const createIndexPipelineDefinitions = async (
version: 1,
};
await esClient.ingest.putPipeline(customPipeline);
// @ts-expect-error pipeline._meta defined as mandatory
result = { ...result, [customPipeline.id]: customPipeline };
const ingestPipeline = {
_meta: {
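Server-side, the same relaxation applies when the definitions are stored via `ingest.putPipeline`, which takes the pipeline fields at the top level of the request. A sketch under that assumption (pipeline id and contents are hypothetical):

```ts
import type { ElasticsearchClient } from '@kbn/core/server';

async function createCustomPipeline(esClient: ElasticsearchClient, indexName: string) {
  const customPipeline = {
    id: `${indexName}@custom`, // hypothetical pipeline id
    description: `Customizable ingest pipeline for the '${indexName}' index`,
    processors: [],
    version: 1,
  };
  // With `_meta` optional in 8.13, the same object can be stored and returned as-is.
  await esClient.ingest.putPipeline(customPipeline);
  return { [customPipeline.id]: customPipeline };
}
```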


@ -467,7 +467,6 @@ export function registerIndexRoutes({
const createPipelineResult = await preparePipelineAndIndexForMlInference(
indexName,
pipelineName,
// @ts-expect-error pipeline._meta defined as mandatory
pipelineDefinition,
modelId,
fieldMappings,
@ -664,7 +663,6 @@ export function registerIndexRoutes({
const simulateRequest: IngestSimulateRequest = {
docs,
// @ts-expect-error pipeline._meta defined as mandatory
pipeline: { description: defaultDescription, ...pipeline },
};
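The same applies to simulate requests: an inline pipeline without `_meta` now satisfies the type. A rough sketch of the request literal (the sample document is hypothetical):

```ts
import type { IngestSimulateRequest } from '@elastic/elasticsearch/lib/api/types';

const simulateRequest: IngestSimulateRequest = {
  docs: [{ _index: 'my-index', _source: { message: 'hello world' } }], // hypothetical sample doc
  // Inline pipeline with no `_meta`: accepted as-is by the 8.13 types.
  pipeline: { description: 'ad-hoc simulation pipeline', processors: [] },
};
```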


@ -22,7 +22,6 @@ export async function analyzeFile(
const body = await client.asInternalUser.textStructure.findStructure(
{
body: data,
// @ts-expect-error TextStructureFindStructureRequest type is out of date and doesn't include ecs_compatibility
ecs_compatibility: 'v1',
...overrides,
},
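`ecs_compatibility` is likewise part of the find_structure request type now. A hedged sketch mirroring the call above (the sample data and the extra override are hypothetical):

```ts
import type { ElasticsearchClient } from '@kbn/core/server';

async function analyzeSample(client: ElasticsearchClient, data: string) {
  return client.textStructure.findStructure(
    {
      body: data,              // raw sample text from the uploaded file
      ecs_compatibility: 'v1', // typed in 8.13, so the suppression comment goes away
      lines_to_sample: 1000,   // hypothetical override
    },
    { maxRetries: 0 }
  );
}
```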


@ -7,9 +7,7 @@
import { schema } from '@kbn/config-schema';
// @ts-ignore
import { GrokdebuggerRequest } from '../../../models/grokdebugger_request';
// @ts-ignore
import { GrokdebuggerResponse } from '../../../models/grokdebugger_response';
import { handleEsError } from '../../../shared_imports';
@ -37,7 +35,6 @@ export function registerGrokSimulateRoute(framework: KibanaFramework) {
const esClient = (await requestContext.core).elasticsearch.client;
const grokdebuggerRequest = GrokdebuggerRequest.fromDownstreamJSON(request.body);
const simulateResponseFromES = await esClient.asCurrentUser.ingest.simulate({
// @ts-expect-error pipeline._meta defined as mandatory
body: grokdebuggerRequest.upstreamJSON,
});
const grokdebuggerResponse = GrokdebuggerResponse.fromUpstreamJSON(simulateResponseFromES);


@ -12,7 +12,6 @@ describe('pipeline_serialization', () => {
it('should deserialize pipelines', () => {
expect(
deserializePipelines({
// @ts-expect-error pipeline._meta defined as mandatory
pipeline1: {
description: 'pipeline 1 description',
version: 1,
@ -32,7 +31,6 @@ describe('pipeline_serialization', () => {
},
],
},
// @ts-expect-error pipeline._meta defined as mandatory
pipeline2: {
description: 'pipeline2 description',
version: 1,


@ -37,7 +37,6 @@ export const registerSimulateRoute = ({
const response = await clusterClient.asCurrentUser.ingest.simulate({
verbose,
body: {
// @ts-expect-error pipeline._meta defined as mandatory
pipeline,
docs: documents as estypes.IngestSimulateDocument[],
},


@ -61,7 +61,6 @@ export const AddInferencePipelineFlyout: FC<AddInferencePipelineFlyoutProps> = (
const createPipeline = async () => {
setFormState({ ...formState, creatingPipeline: true });
try {
// @ts-expect-error pipeline._meta is defined as mandatory
await createInferencePipeline(formState.pipelineName, getPipelineConfig(formState));
setFormState({
...formState,
@ -169,7 +168,6 @@ export const AddInferencePipelineFlyout: FC<AddInferencePipelineFlyoutProps> = (
)}
{step === ADD_INFERENCE_PIPELINE_STEPS.CREATE && (
<ReviewAndCreatePipeline
// @ts-expect-error pipeline._meta is defined as mandatory
inferencePipeline={getPipelineConfig(formState)}
modelType={modelType}
pipelineName={formState.pipelineName}


@ -92,7 +92,6 @@ export const TestPipeline: FC<Props> = memo(({ state, sourceIndex, mode }) => {
const simulatePipeline = async () => {
try {
const result = await trainedModelPipelineSimulate(
// @ts-expect-error pipeline._meta is defined as mandatory
pipelineConfig,
JSON.parse(sampleDocsString) as IngestSimulateDocument[]
);


@ -15,7 +15,6 @@ export function getPipelineConfig(state: InferecePipelineCreationState): estypes
? initialPipelineConfig?.processors[0]
: {};
// @ts-expect-error pipeline._meta is defined as mandatory
return {
description: pipelineDescription,
processors: [


@ -71,7 +71,6 @@ export abstract class InferenceBase<TInferResponse> {
private inferenceError$ = new BehaviorSubject<MLHttpFetchError | null>(null);
private runningState$ = new BehaviorSubject<RUNNING_STATE>(RUNNING_STATE.STOPPED);
private isValid$ = new BehaviorSubject<boolean>(false);
// @ts-expect-error pipeline._meta is defined as mandatory
private pipeline$ = new BehaviorSubject<estypes.IngestPipeline>({});
private supportedFieldTypes: ES_FIELD_TYPES[] = [ES_FIELD_TYPES.TEXT];
private selectedDataViewId: string | undefined;
@ -248,7 +247,6 @@ export abstract class InferenceBase<TInferResponse> {
protected abstract inferIndex(): Promise<TInferResponse[]>;
public generatePipeline(): estypes.IngestPipeline {
// @ts-expect-error pipeline._meta is defined as mandatory
return {
processors: this.getProcessors(),
};


@ -448,7 +448,6 @@ export function trainedModelsRoutes(
try {
const { pipeline, pipelineName } = request.body;
const body = await modelsProvider(client, mlClient, cloud).createInferencePipeline(
// @ts-expect-error pipeline._meta is defined as mandatory
pipeline!,
pipelineName
);


@ -94,7 +94,6 @@ export function definePutRolesRoutes({
await esClient.asCurrentUser.security.putRole({
name: request.params.name,
// @ts-expect-error RoleIndexPrivilege is not compatible. grant is required in IndicesPrivileges.field_security
body,
});
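The role payload now passes the client's `IndicesPrivileges` check directly, so the suppression around `field_security` is gone as well. A sketch only, with a hypothetical role:

```ts
import type { ElasticsearchClient } from '@kbn/core/server';

async function putReadOnlyRole(esClient: ElasticsearchClient, name: string) {
  await esClient.security.putRole({
    name,
    body: {
      indices: [
        {
          names: ['logs-*'],                // hypothetical index pattern
          privileges: ['read'],
          field_security: { grant: ['*'] }, // an explicit grant keeps the shape unambiguous
        },
      ],
    },
  });
}
```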


@ -8,7 +8,6 @@
import { TypeOf } from '@kbn/config-schema';
import type {
SnapshotGetRepositoryResponse,
SnapshotRepositorySettings,
PluginStats,
} from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
@ -292,9 +291,10 @@ export function registerRepositoriesRoutes({
const response = await clusterClient.asCurrentUser.snapshot.createRepository({
name,
body: {
+ // @ts-expect-error upgrade to @elastic/elasticsearch v8.13.0: can't be string, only valid "source"
  type,
  // TODO: Bring {@link RepositorySettings} in line with {@link SnapshotRepositorySettings}
- settings: serializeRepositorySettings(settings) as SnapshotRepositorySettings,
+ settings: serializeRepositorySettings(settings),
},
verify: false,
});
@ -325,9 +325,10 @@ export function registerRepositoriesRoutes({
// Otherwise update repository
const response = await clusterClient.asCurrentUser.snapshot.createRepository({
name,
- body: {
+ repository: {
+   // @ts-expect-error our type is a string, which doesn't match any of the overloads "source", "s3", "azure", ...
    type,
-   settings: serializeRepositorySettings(settings) as SnapshotRepositorySettings,
+   settings: serializeRepositorySettings(settings),
},
verify: false,
});
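The functional tests further down show the same request in its 8.13 shape: the repository definition nests under `repository` rather than being passed as `body` or as top-level `type`/`settings`. A hedged sketch:

```ts
import { Client } from '@elastic/elasticsearch';

const es = new Client({ node: 'http://localhost:9200' }); // hypothetical node

async function createFsRepository() {
  await es.snapshot.createRepository({
    name: 'my-repository',
    repository: {
      type: 'fs',
      settings: { location: '/tmp/es-backups' },
    },
    verify: false,
  });
}
```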


@ -16,16 +16,12 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
await es.snapshot.createRepository({
name: repoName,
verify: true,
- type: 'fs',
+ repository: {
+   type: 'fs',
+   settings: {
+     location: 'temp',
+   },
+ },
- settings: {
-   location: 'temp',
- },
});
}


@ -253,7 +253,6 @@ export default function ({ getService }: FtrProviderContext) {
await ml.testExecution.logTestStep(
'should complete the deploy model pipeline Create pipeline step'
);
// @ts-expect-error pipeline._meta is defined as mandatory
await ml.deployDFAModelFlyout.completeTrainedModelsInferenceFlyoutCreateStep({
description: modelWithoutPipelineDataExpectedValues.description,
processors: [
@ -315,7 +314,6 @@ export default function ({ getService }: FtrProviderContext) {
await ml.testExecution.logTestStep(
'should complete the deploy model pipeline Create pipeline step'
);
// @ts-expect-error pipeline._meta is defined as mandatory
await ml.deployDFAModelFlyout.completeTrainedModelsInferenceFlyoutCreateStep({
description: modelWithoutPipelineDataExpectedValues.duplicateDescription,
processors: [


@ -23,12 +23,14 @@ export default ({ getPageObjects, getService }: FtrProviderContext) => {
// Create a repository
await es.snapshot.createRepository({
name: 'my-repository',
- type: 'fs',
- settings: {
-   location: '/tmp/es-backups/',
-   compress: true,
- },
  verify: true,
+ repository: {
+   type: 'fs',
+   settings: {
+     location: '/tmp/es-backups/',
+     compress: true,
+   },
+ },
});
// Create a snapshot


@ -1718,12 +1718,12 @@
"@elastic/transport" "^8.3.1"
tslib "^2.4.0"
"@elastic/elasticsearch@^8.12.2":
version "8.12.2"
resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-8.12.2.tgz#7a241f739a509cc59faee85f79a4c9e9e5ba9128"
integrity sha512-04NvH3LIgcv1Uwguorfw2WwzC9Lhfsqs9f0L6uq6MrCw0lqe/HOQ6E8vJ6EkHAA15iEfbhtxOtenbZVVcE+mAQ==
"@elastic/elasticsearch@^8.13.0":
version "8.13.0"
resolved "https://registry.yarnpkg.com/@elastic/elasticsearch/-/elasticsearch-8.13.0.tgz#625c6fba3caf944370c6859482fbd5cf3543aea8"
integrity sha512-OAYgzqArPqgDaIJ1yT0RX31YCgr1lleo53zL+36i23PFjHu08CA6Uq+BmBzEV05yEidl+ILPdeSfF3G8hPG/JQ==
dependencies:
"@elastic/transport" "^8.4.1"
"@elastic/transport" "^8.4.0"
tslib "^2.4.0"
"@elastic/ems-client@8.5.1":
@ -1898,7 +1898,7 @@
undici "^5.21.2"
yaml "^2.2.2"
"@elastic/transport@^8.3.1", "@elastic/transport@^8.4.1":
"@elastic/transport@^8.3.1", "@elastic/transport@^8.4.0":
version "8.4.1"
resolved "https://registry.yarnpkg.com/@elastic/transport/-/transport-8.4.1.tgz#f98c5a5e2156bcb3f01170b4aca7e7de4d8b61b8"
integrity sha512-/SXVuVnuU5b4dq8OFY4izG+dmGla185PcoqgK6+AJMpmOeY1QYVNbWtCwvSvoAANN5D/wV+EBU8+x7Vf9EphbA==