[ZDT] SOR: handle higher-version documents retrieved from persistence. (#158251)

## Summary

Follow-up of https://github.com/elastic/kibana/pull/157895
Part of https://github.com/elastic/kibana/issues/150312

Adapt the SOR to accept retrieving documents on higher versions from the
persistence, and convert them back to the latest known version (using
the version schema feature added in
https://github.com/elastic/kibana/pull/157895).
This commit is contained in:
Pierre Gayvallet 2023-05-30 03:11:56 -04:00 committed by GitHub
parent e928b519f3
commit 50950e9c3b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
18 changed files with 438 additions and 79 deletions

View file

@ -71,6 +71,7 @@ export const performBulkCreate = async <T>(
encryption: encryptionHelper,
preflight: preflightHelper,
serializer: serializerHelper,
migration: migrationHelper,
} = helpers;
const { securityExtension } = extensions;
const namespace = commonHelper.getCurrentNamespace(options.namespace);
@ -213,7 +214,7 @@ export const performBulkCreate = async <T>(
const originId = Object.keys(object).includes('originId')
? object.originId
: existingOriginId;
const migrated = migrator.migrateDocument({
const migrated = migrationHelper.migrateInputDocument({
id: object.id,
type: object.type,
attributes: await encryptionHelper.optionallyEncryptAttributes(

View file

@ -60,6 +60,7 @@ export const performBulkGet = async <T>(
common: commonHelper,
validation: validationHelper,
encryption: encryptionHelper,
migration: migrationHelper,
} = helpers;
const { securityExtension, spacesExtension } = extensions;
@ -203,7 +204,7 @@ export const performBulkGet = async <T>(
const document = getSavedObjectFromSource(registry, type, id, doc, {
migrationVersionCompatibility,
});
const migrated = migrator.migrateDocument(document);
const migrated = migrationHelper.migrateStorageDocument(document);
return migrated;
}),

View file

@ -44,6 +44,7 @@ export const performCreate = async <T>(
encryption: encryptionHelper,
preflight: preflightHelper,
serializer: serializerHelper,
migration: migrationHelper,
} = helpers;
const { securityExtension } = extensions;
@ -114,7 +115,7 @@ export const performCreate = async <T>(
// 1. If the originId has been *explicitly set* in the options (defined or undefined), respect that.
// 2. Otherwise, preserve the originId of the existing object that is being overwritten, if any.
const originId = Object.keys(options).includes('originId') ? options.originId : existingOriginId;
const migrated = migrator.migrateDocument({
const migrated = migrationHelper.migrateInputDocument({
id,
type,
...(savedObjectNamespace && { namespace: savedObjectNamespace }),

View file

@ -57,6 +57,7 @@ export const performFind = async <T = unknown, A = unknown>(
common: commonHelper,
encryption: encryptionHelper,
serializer: serializerHelper,
migration: migrationHelper,
} = helpers;
const { securityExtension, spacesExtension } = extensions;
let namespaces!: string[];
@ -237,15 +238,18 @@ export const performFind = async <T = unknown, A = unknown>(
per_page: perPage,
total: body.hits.total,
saved_objects: body.hits.hits.map(
(hit: estypes.SearchHit<SavedObjectsRawDocSource>): SavedObjectsFindResult => ({
...(migrator.migrateDocument(
(hit: estypes.SearchHit<SavedObjectsRawDocSource>): SavedObjectsFindResult => {
const savedObject = migrationHelper.migrateStorageDocument(
serializerHelper.rawToSavedObject(hit as SavedObjectsRawDoc, {
migrationVersionCompatibility,
})
) as SavedObject),
score: hit._score!,
sort: hit.sort,
})
) as SavedObject;
return {
...savedObject,
score: hit._score!,
sort: hit.sort,
};
}
),
pit_id: body.pit_id,
} as typeof result;

View file

@ -34,7 +34,11 @@ export const performGet = async <T>(
extensions = {},
}: ApiExecutionContext
): Promise<SavedObject<T>> => {
const { common: commonHelper, encryption: encryptionHelper } = helpers;
const {
common: commonHelper,
encryption: encryptionHelper,
migration: migrationHelper,
} = helpers;
const { securityExtension } = extensions;
const namespace = commonHelper.getCurrentNamespace(options.namespace);
@ -82,7 +86,7 @@ export const performGet = async <T>(
let migrated: SavedObject<T>;
try {
migrated = migrator.migrateDocument(document) as SavedObject<T>;
migrated = migrationHelper.migrateStorageDocument(document) as SavedObject<T>;
} catch (error) {
throw SavedObjectsErrorHelpers.decorateGeneralError(
error,

View file

@ -11,13 +11,16 @@ import type { IEncryptionHelper } from './encryption';
import type { IValidationHelper } from './validation';
import type { IPreflightCheckHelper } from './preflight_check';
import type { ISerializerHelper } from './serializer';
import type { IMigrationHelper } from './migration';
export { CommonHelper } from './common';
export { EncryptionHelper } from './encryption';
export { ValidationHelper } from './validation';
export { SerializerHelper } from './serializer';
export { CommonHelper, type ICommonHelper } from './common';
export { EncryptionHelper, type IEncryptionHelper } from './encryption';
export { ValidationHelper, type IValidationHelper } from './validation';
export { SerializerHelper, type ISerializerHelper } from './serializer';
export { MigrationHelper, type IMigrationHelper } from './migration';
export {
PreflightCheckHelper,
type IPreflightCheckHelper,
type PreflightCheckNamespacesParams,
type PreflightCheckNamespacesResult,
} from './preflight_check';
@ -28,4 +31,5 @@ export interface RepositoryHelpers {
validation: IValidationHelper;
preflight: IPreflightCheckHelper;
serializer: ISerializerHelper;
migration: IMigrationHelper;
}

View file

@ -0,0 +1,42 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import type { PublicMethodsOf } from '@kbn/utility-types';
import type { SavedObjectUnsanitizedDoc } from '@kbn/core-saved-objects-server';
import type { IKibanaMigrator } from '@kbn/core-saved-objects-base-server-internal';
export type IMigrationHelper = PublicMethodsOf<MigrationHelper>;
/**
 * Repository helper owning document-migration concerns for the SOR APIs.
 * Wraps {@link IKibanaMigrator.migrateDocument} with the correct
 * `allowDowngrade` policy depending on the direction of the operation.
 */
export class MigrationHelper {
  private readonly migrator: IKibanaMigrator;

  constructor(opts: { migrator: IKibanaMigrator }) {
    this.migrator = opts.migrator;
  }

  /**
   * Migrate a document provided as input to a write API (create, update)
   * before it is stored in the index. Downgrades are not allowed here, so
   * this throws if the document is on a higher / unknown version.
   */
  migrateInputDocument(document: SavedObjectUnsanitizedDoc): SavedObjectUnsanitizedDoc {
    return this.migrate(document, false);
  }

  /**
   * Migrate a document fetched from the index by a read API (get, find).
   * Downgrades are accepted, so documents stored on a higher version are
   * converted back before being returned from the API.
   */
  migrateStorageDocument(document: SavedObjectUnsanitizedDoc): SavedObjectUnsanitizedDoc {
    return this.migrate(document, true);
  }

  // Single funnel to the migrator so the downgrade policy lives in one place.
  private migrate(
    document: SavedObjectUnsanitizedDoc,
    allowDowngrade: boolean
  ): SavedObjectUnsanitizedDoc {
    return this.migrator.migrateDocument(document, { allowDowngrade });
  }
}

View file

@ -32,7 +32,7 @@ export const incrementCounterInternal = async <T>(
{ type, id, counterFields, options }: PerformIncrementCounterInternalParams<T>,
{ registry, helpers, client, serializer, migrator }: ApiExecutionContext
): Promise<SavedObject<T>> => {
const { common: commonHelper, preflight: preflightHelper } = helpers;
const { common: commonHelper, preflight: preflightHelper, migration: migrationHelper } = helpers;
const {
migrationVersion,
@ -97,7 +97,7 @@ export const incrementCounterInternal = async <T>(
}
// attributes: { [counterFieldName]: incrementBy },
const migrated = migrator.migrateDocument({
const migrated = migrationHelper.migrateInputDocument({
id,
type,
...(savedObjectNamespace && { namespace: savedObjectNamespace }),

View file

@ -386,8 +386,14 @@ describe('internalBulkResolve', () => {
]);
expect(migrator.migrateDocument).toHaveBeenCalledTimes(2);
expect(migrator.migrateDocument).nthCalledWith(1, 'mock-obj-for-1');
expect(migrator.migrateDocument).nthCalledWith(2, 'mock-obj-for-2');
expect(migrator.migrateDocument).nthCalledWith(
1,
'mock-obj-for-1',
expect.objectContaining({ allowDowngrade: expect.any(Boolean) })
);
expect(migrator.migrateDocument).nthCalledWith(2, 'mock-obj-for-2', {
allowDowngrade: expect.any(Boolean),
});
});
});

View file

@ -187,7 +187,7 @@ export async function internalBulkResolve<T>(
const object = getSavedObjectFromSource<T>(registry, objectType, objectId, doc, {
migrationVersionCompatibility,
});
const migrated = migrator.migrateDocument(object) as SavedObject<T>;
const migrated = migrator.migrateDocument(object, { allowDowngrade: true }) as SavedObject<T>;
if (!encryptionExtension?.isEncryptableType(migrated.type)) {
return migrated;

View file

@ -47,6 +47,7 @@ export const performUpdate = async <T>(
common: commonHelper,
encryption: encryptionHelper,
preflight: preflightHelper,
migration: migrationHelper,
} = helpers;
const { securityExtension } = extensions;
@ -108,7 +109,7 @@ export const performUpdate = async <T>(
savedObjectNamespaces = preflightResult!.savedObjectNamespaces;
}
const migrated = migrator.migrateDocument({
const migrated = migrationHelper.migrateInputDocument({
id,
type,
...(savedObjectNamespace && { namespace: savedObjectNamespace }),

View file

@ -148,7 +148,13 @@ describe('SavedObjectsRepository', () => {
const expectMigrationArgs = (args: unknown, contains = true, n = 1) => {
const obj = contains ? expect.objectContaining(args) : expect.not.objectContaining(args);
expect(migrator.migrateDocument).toHaveBeenNthCalledWith(n, obj);
expect(migrator.migrateDocument).toHaveBeenNthCalledWith(
n,
obj,
expect.objectContaining({
allowDowngrade: expect.any(Boolean),
})
);
};
beforeEach(() => {
@ -1406,8 +1412,8 @@ describe('SavedObjectsRepository', () => {
'migrated',
]);
expect(migrator.migrateDocument).toHaveBeenCalledTimes(2);
expect(migrator.migrateDocument).nthCalledWith(1, expect.objectContaining({ id: obj1.id }));
expect(migrator.migrateDocument).nthCalledWith(2, expect.objectContaining({ id: obj2.id }));
expectMigrationArgs({ id: obj1.id }, true, 1);
expectMigrationArgs({ id: obj2.id }, true, 2);
});
});
});
@ -3992,15 +3998,13 @@ describe('SavedObjectsRepository', () => {
expect(migrator.migrateDocument).toHaveBeenCalledTimes(
noNamespaceSearchResults.hits.hits.length
);
expect(migrator.migrateDocument).toHaveBeenCalledWith(
expect.objectContaining({
type,
id: noNamespaceSearchResults.hits.hits[0]._id.replace(
/(index-pattern|config|globalType)\:/,
''
),
})
);
expectMigrationArgs({
type,
id: noNamespaceSearchResults.hits.hits[0]._id.replace(
/(index-pattern|config|globalType)\:/,
''
),
});
});
});
@ -4360,12 +4364,10 @@ describe('SavedObjectsRepository', () => {
);
await expect(getSuccess(client, repository, registry, type, id)).resolves.toBe('migrated');
expect(migrator.migrateDocument).toHaveBeenCalledTimes(1);
expect(migrator.migrateDocument).toHaveBeenCalledWith(
expect.objectContaining({
id,
type,
})
);
expectMigrationArgs({
id,
type,
});
});
});

View file

@ -64,14 +64,7 @@ import {
} from '@kbn/core-saved-objects-base-server-internal';
import { PointInTimeFinder } from './point_in_time_finder';
import { createRepositoryEsClient, type RepositoryEsClient } from './repository_es_client';
import {
RepositoryHelpers,
CommonHelper,
EncryptionHelper,
ValidationHelper,
PreflightCheckHelper,
SerializerHelper,
} from './apis/helpers';
import type { RepositoryHelpers } from './apis/helpers';
import {
type ApiExecutionContext,
performCreate,
@ -93,6 +86,7 @@ import {
performUpdateObjectsSpaces,
performCollectMultiNamespaceReferences,
} from './apis';
import { createRepositoryHelpers } from './utils';
/**
* Constructor options for {@link SavedObjectsRepository}
@ -198,42 +192,16 @@ export class SavedObjectsRepository implements ISavedObjectsRepository {
this.serializer = serializer;
this.logger = logger;
this.extensions = extensions;
const commonHelper = new CommonHelper({
spaceExtension: extensions?.spacesExtension,
encryptionExtension: extensions?.encryptionExtension,
createPointInTimeFinder: this.createPointInTimeFinder.bind(this),
defaultIndex: index,
kibanaVersion: migrator.kibanaVersion,
registry: typeRegistry,
});
const encryptionHelper = new EncryptionHelper({
encryptionExtension: extensions?.encryptionExtension,
securityExtension: extensions?.securityExtension,
});
const validationHelper = new ValidationHelper({
registry: typeRegistry,
this.helpers = createRepositoryHelpers({
logger,
kibanaVersion: migrator.kibanaVersion,
});
const preflightCheckHelper = new PreflightCheckHelper({
getIndexForType: commonHelper.getIndexForType.bind(commonHelper),
createPointInTimeFinder: commonHelper.createPointInTimeFinder.bind(commonHelper),
serializer,
registry: typeRegistry,
client: this.client,
});
const serializerHelper = new SerializerHelper({
registry: typeRegistry,
index,
typeRegistry,
serializer,
extensions,
migrator,
createPointInTimeFinder: this.createPointInTimeFinder.bind(this),
});
this.helpers = {
common: commonHelper,
preflight: preflightCheckHelper,
validation: validationHelper,
encryption: encryptionHelper,
serializer: serializerHelper,
};
this.apiExecutionContext = {
client: this.client,
extensions: this.extensions,

View file

@ -0,0 +1,93 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import type { Logger } from '@kbn/logging';
import type {
IKibanaMigrator,
SavedObjectsSerializer,
} from '@kbn/core-saved-objects-base-server-internal';
import type {
ISavedObjectTypeRegistry,
SavedObjectsExtensions,
} from '@kbn/core-saved-objects-server';
import {
RepositoryHelpers,
CommonHelper,
EncryptionHelper,
ValidationHelper,
PreflightCheckHelper,
SerializerHelper,
MigrationHelper,
} from '../apis/helpers';
import type { RepositoryEsClient } from '../repository_es_client';
import { CreatePointInTimeFinderFn } from '../point_in_time_finder';
/** Options for {@link createRepositoryHelpers}. */
interface CreateRepositoryHelpersOptions {
  /** Default index the repository reads from / writes to. */
  index: string;
  /** Scoped ES client the helpers use for preflight checks. */
  client: RepositoryEsClient;
  /** Registry of all known saved object types. */
  typeRegistry: ISavedObjectTypeRegistry;
  /** Serializer used to convert raw ES docs to/from saved objects. */
  serializer: SavedObjectsSerializer;
  /** Migrator used by the migration helper and for the kibana version. */
  migrator: IKibanaMigrator;
  logger: Logger;
  /** Optional SOR extensions (spaces / security / encryption). */
  extensions?: SavedObjectsExtensions;
  /** Factory bound to the owning repository's createPointInTimeFinder. */
  createPointInTimeFinder: CreatePointInTimeFinderFn;
}
export const createRepositoryHelpers = ({
logger,
extensions,
index,
client,
typeRegistry,
serializer,
migrator,
createPointInTimeFinder,
}: CreateRepositoryHelpersOptions): RepositoryHelpers => {
const commonHelper = new CommonHelper({
spaceExtension: extensions?.spacesExtension,
encryptionExtension: extensions?.encryptionExtension,
createPointInTimeFinder,
defaultIndex: index,
kibanaVersion: migrator.kibanaVersion,
registry: typeRegistry,
});
const encryptionHelper = new EncryptionHelper({
encryptionExtension: extensions?.encryptionExtension,
securityExtension: extensions?.securityExtension,
});
const validationHelper = new ValidationHelper({
registry: typeRegistry,
logger,
kibanaVersion: migrator.kibanaVersion,
});
const preflightCheckHelper = new PreflightCheckHelper({
getIndexForType: commonHelper.getIndexForType.bind(commonHelper),
createPointInTimeFinder: commonHelper.createPointInTimeFinder.bind(commonHelper),
serializer,
registry: typeRegistry,
client,
});
const serializerHelper = new SerializerHelper({
registry: typeRegistry,
serializer,
});
const migrationHelper = new MigrationHelper({
migrator,
});
const helpers: RepositoryHelpers = {
common: commonHelper,
preflight: preflightCheckHelper,
validation: validationHelper,
encryption: encryptionHelper,
serializer: serializerHelper,
migration: migrationHelper,
};
return helpers;
};

View file

@ -8,3 +8,4 @@
export { decorateEsError } from './decorate_es_error';
export { getRootFields, includedFields } from './included_fields';
export { createRepositoryHelpers } from './create_helpers';

View file

@ -13,8 +13,23 @@ import type {
ValidationHelper,
PreflightCheckHelper,
SerializerHelper,
MigrationHelper,
} from '../lib/apis/helpers';
export type MigrationHelperMock = jest.Mocked<PublicMethodsOf<MigrationHelper>>;

/**
 * Creates a {@link MigrationHelper} mock whose migrate methods return the
 * provided document unchanged (identity pass-through).
 */
const createMigrationHelperMock = (): MigrationHelperMock => {
  const migrateInputDocument = jest.fn().mockImplementation((doc) => doc);
  const migrateStorageDocument = jest.fn().mockImplementation((doc) => doc);
  return {
    migrateInputDocument,
    migrateStorageDocument,
  };
};
export type CommonHelperMock = jest.Mocked<PublicMethodsOf<CommonHelper>>;
const createCommonHelperMock = (): CommonHelperMock => {
@ -88,6 +103,7 @@ export interface RepositoryHelpersMock {
validation: ValidationHelperMock;
preflight: PreflightCheckHelperMock;
serializer: SerializerHelperMock;
migration: MigrationHelperMock;
}
const createRepositoryHelpersMock = (): RepositoryHelpersMock => {
@ -97,6 +113,7 @@ const createRepositoryHelpersMock = (): RepositoryHelpersMock => {
validation: createValidationHelperMock(),
preflight: createPreflightCheckHelperMock(),
serializer: createSerializerHelperMock(),
migration: createMigrationHelperMock(),
};
};
@ -107,4 +124,5 @@ export const apiHelperMocks = {
createValidationHelper: createValidationHelperMock,
createSerializerHelper: createSerializerHelperMock,
createPreflightCheckHelper: createPreflightCheckHelperMock,
createMigrationHelper: createMigrationHelperMock,
};

View file

@ -0,0 +1,19 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
// Jest config for the zdt_2 saved-objects migration integration tests.
module.exports = {
  // TODO replace the line below with
  // preset: '@kbn/test/jest_integration_node'
  // to do so, we must fix all integration tests first
  // see https://github.com/elastic/kibana/pull/130255/
  preset: '@kbn/test/jest_integration',
  rootDir: '../../../../../../..',
  roots: ['<rootDir>/src/core/server/integration_tests/saved_objects/migrations/zdt_2'],
  // must override to match all tests given there is no `integration_tests` subfolder
  testMatch: ['**/*.test.{js,mjs,ts,tsx}'],
};

View file

@ -0,0 +1,194 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import { pick, range } from 'lodash';
import Path from 'path';
import fs from 'fs/promises';
import { type TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import '../jest_matchers';
import { ISavedObjectsRepository } from '@kbn/core-saved-objects-api-server';
import { SavedObjectsModelVersionMap } from '@kbn/core-saved-objects-server';
import { getKibanaMigratorTestKit, startElasticsearch } from '../kibana_migrator_test_kit';
import { delay, createType } from '../test_utils';
import { getBaseMigratorParams } from '../fixtures/zdt_base.fixtures';
import { SavedObjectsBulkCreateObject } from '@kbn/core-saved-objects-api-server';
export const logFilePath = Path.join(__dirname, 'sor_higher.test.log');
// Integration test: documents written by a repository on a higher model
// version must be readable (downgraded on the fly) by a repository on a
// lower model version, and vice versa.
describe('Higher version doc conversion', () => {
  let esServer: TestElasticsearchUtils['es'];
  // Repository wired against the v1 type (modelVersion 2 NOT registered).
  let repositoryV1: ISavedObjectsRepository;
  // Repository wired against the v2 type (modelVersion 2 registered).
  let repositoryV2: ISavedObjectsRepository;

  /**
   * Builds the test SO type. When `includeVersion2` is true, a second model
   * version is added that backfills a `newField` attribute, and the
   * forwardCompatibility schema is widened to keep that field.
   */
  const getTestType = ({ includeVersion2 }: { includeVersion2: boolean }) => {
    const modelVersions: SavedObjectsModelVersionMap = {
      1: {
        changes: [],
        schemas: {
          // v1 only knows `text` and `bool`: strip anything written by a
          // higher version when reading on v1.
          forwardCompatibility: (attrs: any) => {
            return pick(attrs, 'text', 'bool');
          },
        },
      },
    };
    if (includeVersion2) {
      modelVersions[2] = {
        changes: [
          {
            // v2 backfills `newField` on all existing documents.
            type: 'data_backfill',
            transform: (document) => {
              document.attributes.newField = 'someValue';
              return { document };
            },
          },
        ],
        schemas: {
          forwardCompatibility: (attrs: any) => {
            return pick(attrs, 'text', 'bool', 'newField');
          },
        },
      };
    }
    return createType({
      name: 'test-type',
      switchToModelVersionAt: '8.0.0',
      modelVersions,
      mappings: {
        dynamic: false,
        properties: {
          text: { type: 'text' },
          bool: { type: 'boolean' },
        },
      },
    });
  };

  /**
   * Seeds the cluster: migrates with the v1 type, bulk-creates 5 sample
   * documents, then runs migrations again with the v2 type so the stored
   * documents end up on model version 2 while the v1 repository stays on v1.
   */
  const createBaseline = async () => {
    const testTypeV1 = getTestType({ includeVersion2: false });
    const testTypeV2 = getTestType({ includeVersion2: true });
    const {
      runMigrations,
      savedObjectsRepository: savedObjectsRepositoryV1,
      client,
    } = await getKibanaMigratorTestKit({
      ...getBaseMigratorParams(),
      logFilePath,
      types: [testTypeV1],
    });
    await runMigrations();
    const sampleAObjs = range(5).map<SavedObjectsBulkCreateObject>((number) => ({
      id: `doc-${number}`,
      type: 'test-type',
      attributes: {
        text: `a_${number}`,
        bool: true,
      },
    }));
    // wait_for so the documents are visible to the second migration run.
    await savedObjectsRepositoryV1.bulkCreate(sampleAObjs, { refresh: 'wait_for' });
    const { runMigrations: runMigrationsAgain, savedObjectsRepository: savedObjectsRepositoryV2 } =
      await getKibanaMigratorTestKit({
        ...getBaseMigratorParams(),
        logFilePath,
        types: [testTypeV2],
      });
    await runMigrationsAgain();
    // return the repositories for both type versions, plus the raw ES client
    return { savedObjectsRepositoryV1, savedObjectsRepositoryV2, client };
  };

  beforeAll(async () => {
    // best-effort removal of a leftover log file from a previous run
    await fs.unlink(logFilePath).catch(() => {});
    esServer = await startElasticsearch();
    const { savedObjectsRepositoryV1: sorV1, savedObjectsRepositoryV2: sorV2 } =
      await createBaseline();
    repositoryV1 = sorV1;
    repositoryV2 = sorV2;
  });

  afterAll(async () => {
    await esServer?.stop();
    await delay(10);
  });

  // Each suite below checks the same contract for one read API: the v1
  // repository sees the v1 shape (no `newField`), the v2 repository sees
  // the v2 shape (with the backfilled `newField`).
  describe('#get', () => {
    it('returns the documents with the correct shape', async () => {
      const docV1 = await repositoryV1.get('test-type', 'doc-1');
      expect(docV1.attributes).toEqual({
        bool: true,
        text: 'a_1',
      });
      const docV2 = await repositoryV2.get('test-type', 'doc-1');
      expect(docV2.attributes).toEqual({
        bool: true,
        text: 'a_1',
        newField: 'someValue',
      });
    });
  });

  describe('#bulkGet', () => {
    it('returns the documents with the correct shape', async () => {
      const docsV1 = await repositoryV1.bulkGet([{ type: 'test-type', id: 'doc-1' }]);
      expect(docsV1.saved_objects[0].attributes).toEqual({
        bool: true,
        text: 'a_1',
      });
      const docV2 = await repositoryV2.bulkGet([{ type: 'test-type', id: 'doc-1' }]);
      expect(docV2.saved_objects[0].attributes).toEqual({
        bool: true,
        text: 'a_1',
        newField: 'someValue',
      });
    });
  });

  describe('#resolve', () => {
    it('returns the documents with the correct shape', async () => {
      const docV1 = await repositoryV1.resolve('test-type', 'doc-1');
      expect(docV1.saved_object.attributes).toEqual({
        bool: true,
        text: 'a_1',
      });
      const docV2 = await repositoryV2.resolve('test-type', 'doc-1');
      expect(docV2.saved_object.attributes).toEqual({
        bool: true,
        text: 'a_1',
        newField: 'someValue',
      });
    });
  });

  describe('#bulkResolve', () => {
    it('returns the documents with the correct shape', async () => {
      const docsV1 = await repositoryV1.bulkResolve([{ type: 'test-type', id: 'doc-1' }]);
      expect(docsV1.resolved_objects[0].saved_object.attributes).toEqual({
        bool: true,
        text: 'a_1',
      });
      const docV2 = await repositoryV2.bulkResolve([{ type: 'test-type', id: 'doc-1' }]);
      expect(docV2.resolved_objects[0].saved_object.attributes).toEqual({
        bool: true,
        text: 'a_1',
        newField: 'someValue',
      });
    });
  });
});