Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 17:28:26 -04:00)

(cherry picked from commit 116d74ac75)
Co-authored-by: Joe Portner <5295965+jportner@users.noreply.github.com>

parent 15580f36a0
commit 4a711e2914
42 changed files with 2317 additions and 1374 deletions
@ -0,0 +1,60 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import type { collectSavedObjects } from './lib/collect_saved_objects';
import type { checkReferenceOrigins } from './lib/check_reference_origins';
import type { regenerateIds } from './lib/regenerate_ids';
import type { validateReferences } from './lib/validate_references';
import type { checkConflicts } from './lib/check_conflicts';
import type { checkOriginConflicts } from './lib/check_origin_conflicts';
import type { createSavedObjects } from './lib/create_saved_objects';
import type { executeImportHooks } from './lib/execute_import_hooks';

export const mockCollectSavedObjects = jest.fn() as jest.MockedFunction<typeof collectSavedObjects>;
jest.mock('./lib/collect_saved_objects', () => ({
  collectSavedObjects: mockCollectSavedObjects,
}));

export const mockCheckReferenceOrigins = jest.fn() as jest.MockedFunction<
  typeof checkReferenceOrigins
>;
jest.mock('./lib/check_reference_origins', () => ({
  checkReferenceOrigins: mockCheckReferenceOrigins,
}));

export const mockRegenerateIds = jest.fn() as jest.MockedFunction<typeof regenerateIds>;
jest.mock('./lib/regenerate_ids', () => ({
  regenerateIds: mockRegenerateIds,
}));

export const mockValidateReferences = jest.fn() as jest.MockedFunction<typeof validateReferences>;
jest.mock('./lib/validate_references', () => ({
  validateReferences: mockValidateReferences,
}));

export const mockCheckConflicts = jest.fn() as jest.MockedFunction<typeof checkConflicts>;
jest.mock('./lib/check_conflicts', () => ({
  checkConflicts: mockCheckConflicts,
}));

export const mockCheckOriginConflicts = jest.fn() as jest.MockedFunction<
  typeof checkOriginConflicts
>;
jest.mock('./lib/check_origin_conflicts', () => ({
  checkOriginConflicts: mockCheckOriginConflicts,
}));

export const mockCreateSavedObjects = jest.fn() as jest.MockedFunction<typeof createSavedObjects>;
jest.mock('./lib/create_saved_objects', () => ({
  createSavedObjects: mockCreateSavedObjects,
}));

export const mockExecuteImportHooks = jest.fn() as jest.MockedFunction<typeof executeImportHooks>;
jest.mock('./lib/execute_import_hooks', () => ({
  executeImportHooks: mockExecuteImportHooks,
}));
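The new import_saved_objects.test.mock.ts file above follows the *.test.mock.ts convention used elsewhere in Kibana: each dependency of the module under test is replaced with a typed jest.fn(), and jest.mock() is called in the same file, so a test only needs to import this file ahead of the module under test to have every dependency stubbed. The `mock` prefix on the exported names is significant, because jest's hoisting only lets a module factory reference out-of-scope variables whose names begin with "mock". A minimal sketch of the same pattern against a hypothetical ./lib/do_thing module (illustration only, not code from this change):

// do_thing.test.mock.ts (hypothetical)
import type { doThing } from './lib/do_thing';

export const mockDoThing = jest.fn() as jest.MockedFunction<typeof doThing>;
jest.mock('./lib/do_thing', () => ({ doThing: mockDoThing }));

// some_module.test.ts (hypothetical) would then start with:
// import { mockDoThing } from './do_thing.test.mock'; // registers the mock first
// import { someModule } from './some_module'; // the module under test now receives the mock
// beforeEach(() => {
//   jest.clearAllMocks();
//   mockDoThing.mockResolvedValue({ ok: true }); // configure per-test behavior
// });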
@ -6,6 +6,17 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import {
|
||||
mockCollectSavedObjects,
|
||||
mockCheckReferenceOrigins,
|
||||
mockRegenerateIds,
|
||||
mockValidateReferences,
|
||||
mockCheckConflicts,
|
||||
mockCheckOriginConflicts,
|
||||
mockCreateSavedObjects,
|
||||
mockExecuteImportHooks,
|
||||
} from './import_saved_objects.test.mock';
|
||||
|
||||
import { Readable } from 'stream';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import {
|
||||
|
@ -19,52 +30,33 @@ import { ISavedObjectTypeRegistry } from '..';
|
|||
import { typeRegistryMock } from '../saved_objects_type_registry.mock';
|
||||
import { importSavedObjectsFromStream, ImportSavedObjectsOptions } from './import_saved_objects';
|
||||
import { SavedObjectsImportHook, SavedObjectsImportWarning } from './types';
|
||||
|
||||
import {
|
||||
collectSavedObjects,
|
||||
regenerateIds,
|
||||
validateReferences,
|
||||
checkConflicts,
|
||||
checkOriginConflicts,
|
||||
createSavedObjects,
|
||||
executeImportHooks,
|
||||
} from './lib';
|
||||
|
||||
jest.mock('./lib/collect_saved_objects');
|
||||
jest.mock('./lib/regenerate_ids');
|
||||
jest.mock('./lib/validate_references');
|
||||
jest.mock('./lib/check_conflicts');
|
||||
jest.mock('./lib/check_origin_conflicts');
|
||||
jest.mock('./lib/create_saved_objects');
|
||||
jest.mock('./lib/execute_import_hooks');
|
||||
|
||||
const getMockFn = <T extends (...args: any[]) => any, U>(fn: (...args: Parameters<T>) => U) =>
|
||||
fn as jest.MockedFunction<(...args: Parameters<T>) => U>;
|
||||
import type { ImportStateMap } from './lib';
|
||||
|
||||
describe('#importSavedObjectsFromStream', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
// mock empty output of each of these mocked modules so the import doesn't throw an error
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
getMockFn(regenerateIds).mockReturnValue(new Map());
|
||||
getMockFn(validateReferences).mockResolvedValue([]);
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckReferenceOrigins.mockResolvedValue({ importStateMap: new Map() });
|
||||
mockRegenerateIds.mockReturnValue(new Map());
|
||||
mockValidateReferences.mockResolvedValue([]);
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(checkOriginConflicts).mockResolvedValue({
|
||||
mockCheckOriginConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({ errors: [], createdObjects: [] });
|
||||
getMockFn(executeImportHooks).mockResolvedValue([]);
|
||||
mockCreateSavedObjects.mockResolvedValue({ errors: [], createdObjects: [] });
|
||||
mockExecuteImportHooks.mockResolvedValue([]);
|
||||
});
|
||||
|
||||
let readStream: Readable;
|
||||
|
@ -143,24 +135,57 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
await importSavedObjectsFromStream(options);
|
||||
expect(typeRegistry.getImportableAndExportableTypes).toHaveBeenCalled();
|
||||
const collectSavedObjectsOptions = { readStream, objectLimit, supportedTypes };
|
||||
expect(collectSavedObjects).toHaveBeenCalledWith(collectSavedObjectsOptions);
|
||||
expect(mockCollectSavedObjects).toHaveBeenCalledWith(collectSavedObjectsOptions);
|
||||
});
|
||||
|
||||
test('checks reference origins', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
const importStateMap = new Map([
|
||||
[`${collectedObjects[0].type}:${collectedObjects[0].id}`, {}],
|
||||
[`foo:bar`, { isOnlyReference: true }],
|
||||
]);
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importStateMap,
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
expect(mockCheckReferenceOrigins).toHaveBeenCalledWith({
|
||||
savedObjectsClient,
|
||||
typeRegistry,
|
||||
namespace,
|
||||
importStateMap,
|
||||
});
|
||||
});
|
||||
|
||||
test('validates references', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map([
|
||||
[`${collectedObjects[0].type}:${collectedObjects[0].id}`, {}],
|
||||
[`foo:bar`, { isOnlyReference: true }],
|
||||
]),
|
||||
});
|
||||
mockCheckReferenceOrigins.mockResolvedValue({
|
||||
importStateMap: new Map([[`foo:bar`, { isOnlyReference: true, id: 'baz' }]]),
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
expect(validateReferences).toHaveBeenCalledWith(
|
||||
collectedObjects,
|
||||
expect(mockValidateReferences).toHaveBeenCalledWith({
|
||||
objects: collectedObjects,
|
||||
savedObjectsClient,
|
||||
namespace
|
||||
);
|
||||
namespace,
|
||||
importStateMap: new Map([
|
||||
// This importStateMap is a combination of the other two
|
||||
[`${collectedObjects[0].type}:${collectedObjects[0].id}`, {}],
|
||||
[`foo:bar`, { isOnlyReference: true, id: 'baz' }],
|
||||
]),
|
||||
});
|
||||
});
|
||||
|
||||
test('executes import hooks', async () => {
|
||||
|
@ -170,19 +195,19 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
|
||||
const options = setupOptions({ importHooks });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({
|
||||
mockCreateSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
createdObjects: collectedObjects,
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
|
||||
expect(executeImportHooks).toHaveBeenCalledWith({
|
||||
expect(mockExecuteImportHooks).toHaveBeenCalledWith({
|
||||
objects: collectedObjects,
|
||||
importHooks,
|
||||
});
|
||||
|
@ -192,23 +217,23 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('does not regenerate object IDs', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
expect(regenerateIds).not.toHaveBeenCalled();
|
||||
expect(mockRegenerateIds).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('checks conflicts', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
|
@ -218,18 +243,19 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
namespace,
|
||||
ignoreRegularConflicts: overwrite,
|
||||
};
|
||||
expect(checkConflicts).toHaveBeenCalledWith(checkConflictsParams);
|
||||
expect(mockCheckConflicts).toHaveBeenCalledWith(checkConflictsParams);
|
||||
});
|
||||
|
||||
test('checks origin conflicts', async () => {
|
||||
const options = setupOptions();
|
||||
const filteredObjects = [createObject()];
|
||||
const importIdMap = new Map();
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
const importStateMap = new Map();
|
||||
const pendingOverwrites = new Set<string>();
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects,
|
||||
importIdMap,
|
||||
pendingOverwrites: new Set(),
|
||||
importStateMap,
|
||||
pendingOverwrites,
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
|
@ -239,9 +265,10 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
typeRegistry,
|
||||
namespace,
|
||||
ignoreRegularConflicts: overwrite,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
pendingOverwrites,
|
||||
};
|
||||
expect(checkOriginConflicts).toHaveBeenCalledWith(checkOriginConflictsParams);
|
||||
expect(mockCheckOriginConflicts).toHaveBeenCalledWith(checkOriginConflictsParams);
|
||||
});
|
||||
|
||||
test('creates saved objects', async () => {
|
||||
|
@ -249,43 +276,47 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const collectedObjects = [createObject()];
|
||||
const filteredObjects = [createObject()];
|
||||
const errors = [createError(), createError(), createError(), createError()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [errors[0]],
|
||||
collectedObjects,
|
||||
importIdMap: new Map([
|
||||
importStateMap: new Map([
|
||||
['foo', {}],
|
||||
['bar', {}],
|
||||
['baz', {}],
|
||||
['baz', { isOnlyReference: true }],
|
||||
]),
|
||||
});
|
||||
getMockFn(validateReferences).mockResolvedValue([errors[1]]);
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckReferenceOrigins.mockResolvedValue({
|
||||
importStateMap: new Map([['baz', { isOnlyReference: true, destinationId: 'newId1' }]]),
|
||||
});
|
||||
mockValidateReferences.mockResolvedValue([errors[1]]);
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [errors[2]],
|
||||
filteredObjects,
|
||||
importIdMap: new Map([['bar', { id: 'newId1' }]]),
|
||||
importStateMap: new Map([['foo', { destinationId: 'newId2' }]]),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(checkOriginConflicts).mockResolvedValue({
|
||||
mockCheckOriginConflicts.mockResolvedValue({
|
||||
errors: [errors[3]],
|
||||
importIdMap: new Map([['baz', { id: 'newId2' }]]),
|
||||
importStateMap: new Map([['bar', { destinationId: 'newId3' }]]),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
const importIdMap = new Map([
|
||||
['foo', {}],
|
||||
['bar', { id: 'newId1' }],
|
||||
['baz', { id: 'newId2' }],
|
||||
// assert that the importStateMap is correctly composed of the results from the four modules
|
||||
const importStateMap = new Map([
|
||||
['foo', { destinationId: 'newId2' }],
|
||||
['bar', { destinationId: 'newId3' }],
|
||||
['baz', { isOnlyReference: true, destinationId: 'newId1' }],
|
||||
]);
|
||||
const createSavedObjectsParams = {
|
||||
objects: collectedObjects,
|
||||
accumulatedErrors: errors,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
overwrite,
|
||||
namespace,
|
||||
};
|
||||
expect(createSavedObjects).toHaveBeenCalledWith(createSavedObjectsParams);
|
||||
expect(mockCreateSavedObjects).toHaveBeenCalledWith(createSavedObjectsParams);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -293,52 +324,58 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('regenerates object IDs', async () => {
|
||||
const options = setupOptions({ createNewCopies: true });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
expect(regenerateIds).toHaveBeenCalledWith(collectedObjects);
|
||||
expect(mockRegenerateIds).toHaveBeenCalledWith(collectedObjects);
|
||||
});
|
||||
|
||||
test('does not check conflicts or check origin conflicts', async () => {
|
||||
const options = setupOptions({ createNewCopies: true });
|
||||
getMockFn(validateReferences).mockResolvedValue([]);
|
||||
mockValidateReferences.mockResolvedValue([]);
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
expect(checkConflicts).not.toHaveBeenCalled();
|
||||
expect(checkOriginConflicts).not.toHaveBeenCalled();
|
||||
expect(mockCheckConflicts).not.toHaveBeenCalled();
|
||||
expect(mockCheckOriginConflicts).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('creates saved objects', async () => {
|
||||
const options = setupOptions({ createNewCopies: true });
|
||||
const collectedObjects = [createObject()];
|
||||
const errors = [createError(), createError()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [errors[0]],
|
||||
collectedObjects,
|
||||
importIdMap: new Map([
|
||||
importStateMap: new Map([
|
||||
['foo', {}],
|
||||
['bar', {}],
|
||||
['bar', { isOnlyReference: true }],
|
||||
]),
|
||||
});
|
||||
getMockFn(validateReferences).mockResolvedValue([errors[1]]);
|
||||
// this importIdMap is not composed with the one obtained from `collectSavedObjects`
|
||||
const importIdMap = new Map().set(`id1`, { id: `newId1` });
|
||||
getMockFn(regenerateIds).mockReturnValue(importIdMap);
|
||||
mockCheckReferenceOrigins.mockResolvedValue({
|
||||
importStateMap: new Map([['bar', { isOnlyReference: true, destinationId: 'newId' }]]),
|
||||
});
|
||||
mockValidateReferences.mockResolvedValue([errors[1]]);
|
||||
mockRegenerateIds.mockReturnValue(new Map([['foo', { destinationId: `randomId1` }]]));
|
||||
|
||||
await importSavedObjectsFromStream(options);
|
||||
// assert that the importStateMap is correctly composed of the results from the three modules
|
||||
const importStateMap: ImportStateMap = new Map([
|
||||
['foo', { destinationId: `randomId1` }],
|
||||
['bar', { isOnlyReference: true, destinationId: 'newId' }],
|
||||
]);
|
||||
const createSavedObjectsParams = {
|
||||
objects: collectedObjects,
|
||||
accumulatedErrors: errors,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
overwrite,
|
||||
namespace,
|
||||
};
|
||||
expect(createSavedObjects).toHaveBeenCalledWith(createSavedObjectsParams);
|
||||
expect(mockCreateSavedObjects).toHaveBeenCalledWith(createSavedObjectsParams);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -353,10 +390,10 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
|
||||
test('returns success=false if an error occurred', async () => {
|
||||
const options = setupOptions();
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [createError()],
|
||||
collectedObjects: [],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
const result = await importSavedObjectsFromStream(options);
|
||||
|
@ -371,18 +408,18 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('returns warnings from the import hooks', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({
|
||||
mockCreateSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
createdObjects: collectedObjects,
|
||||
});
|
||||
|
||||
const warnings: SavedObjectsImportWarning[] = [{ type: 'simple', message: 'foo' }];
|
||||
getMockFn(executeImportHooks).mockResolvedValue(warnings);
|
||||
mockExecuteImportHooks.mockResolvedValue(warnings);
|
||||
|
||||
const result = await importSavedObjectsFromStream(options);
|
||||
|
||||
|
@ -419,16 +456,16 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
|
||||
test('with createNewCopies disabled', async () => {
|
||||
const options = setupOptions();
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set([
|
||||
`${success2.type}:${success2.id}`, // the success2 object was overwritten
|
||||
`${error2.type}:${error2.id}`, // an attempt was made to overwrite the error2 object
|
||||
]),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({ errors, createdObjects });
|
||||
mockCreateSavedObjects.mockResolvedValue({ errors, createdObjects });
|
||||
|
||||
const result = await importSavedObjectsFromStream(options);
|
||||
// successResults only includes the imported object's type, id, and destinationId (if a new one was generated)
|
||||
|
@ -457,7 +494,7 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('with createNewCopies enabled', async () => {
|
||||
// however, we include it here for posterity
|
||||
const options = setupOptions({ createNewCopies: true });
|
||||
getMockFn(createSavedObjects).mockResolvedValue({ errors, createdObjects });
|
||||
mockCreateSavedObjects.mockResolvedValue({ errors, createdObjects });
|
||||
|
||||
const result = await importSavedObjectsFromStream(options);
|
||||
// successResults only includes the imported object's type, id, and destinationId (if a new one was generated)
|
||||
|
@ -495,13 +532,13 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
},
|
||||
});
|
||||
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({ errors: [], createdObjects: [obj1, obj2] });
|
||||
mockCreateSavedObjects.mockResolvedValue({ errors: [], createdObjects: [obj1, obj2] });
|
||||
|
||||
const result = await importSavedObjectsFromStream(options);
|
||||
// successResults only includes the imported object's type, id, and destinationId (if a new one was generated)
|
||||
|
@ -529,24 +566,24 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('accumulates multiple errors', async () => {
|
||||
const options = setupOptions();
|
||||
const errors = [createError(), createError(), createError(), createError(), createError()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [errors[0]],
|
||||
collectedObjects: [],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
getMockFn(validateReferences).mockResolvedValue([errors[1]]);
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockValidateReferences.mockResolvedValue([errors[1]]);
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [errors[2]],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(checkOriginConflicts).mockResolvedValue({
|
||||
mockCheckOriginConflicts.mockResolvedValue({
|
||||
errors: [errors[3]],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({ errors: [errors[4]], createdObjects: [] });
|
||||
mockCreateSavedObjects.mockResolvedValue({ errors: [errors[4]], createdObjects: [] });
|
||||
|
||||
const result = await importSavedObjectsFromStream(options);
|
||||
const expectedErrors = errors.map(({ type, id }) => expect.objectContaining({ type, id }));
|
||||
|
|
|
@ -15,6 +15,7 @@ import {
|
|||
SavedObjectsImportHook,
|
||||
} from './types';
|
||||
import {
|
||||
checkReferenceOrigins,
|
||||
validateReferences,
|
||||
checkOriginConflicts,
|
||||
createSavedObjects,
|
||||
|
@ -72,20 +73,34 @@ export async function importSavedObjectsFromStream({
|
|||
supportedTypes,
|
||||
});
|
||||
errorAccumulator = [...errorAccumulator, ...collectSavedObjectsResult.errors];
|
||||
/** Map of all IDs for objects that we are attempting to import; each value is empty by default */
|
||||
let importIdMap = collectSavedObjectsResult.importIdMap;
|
||||
// Map of all IDs for objects that we are attempting to import, and any references that are not included in the read stream;
|
||||
// each value is empty by default
|
||||
let importStateMap = collectSavedObjectsResult.importStateMap;
|
||||
let pendingOverwrites = new Set<string>();
|
||||
|
||||
// Validate references
|
||||
const validateReferencesResult = await validateReferences(
|
||||
collectSavedObjectsResult.collectedObjects,
|
||||
// Check any references that aren't included in the import file and retries, to see if they have a match with a different origin
|
||||
const checkReferenceOriginsResult = await checkReferenceOrigins({
|
||||
savedObjectsClient,
|
||||
namespace
|
||||
);
|
||||
typeRegistry,
|
||||
namespace,
|
||||
importStateMap,
|
||||
});
|
||||
importStateMap = new Map([...importStateMap, ...checkReferenceOriginsResult.importStateMap]);
|
||||
|
||||
// Validate references
|
||||
const validateReferencesResult = await validateReferences({
|
||||
objects: collectSavedObjectsResult.collectedObjects,
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
importStateMap,
|
||||
});
|
||||
errorAccumulator = [...errorAccumulator, ...validateReferencesResult];
|
||||
|
||||
if (createNewCopies) {
|
||||
importIdMap = regenerateIds(collectSavedObjectsResult.collectedObjects);
|
||||
importStateMap = new Map([
|
||||
...importStateMap, // preserve any entries for references that aren't included in collectedObjects
|
||||
...regenerateIds(collectSavedObjectsResult.collectedObjects),
|
||||
]);
|
||||
} else {
|
||||
// Check single-namespace objects for conflicts in this namespace, and check multi-namespace objects for conflicts across all namespaces
|
||||
const checkConflictsParams = {
|
||||
|
@ -96,7 +111,7 @@ export async function importSavedObjectsFromStream({
|
|||
};
|
||||
const checkConflictsResult = await checkConflicts(checkConflictsParams);
|
||||
errorAccumulator = [...errorAccumulator, ...checkConflictsResult.errors];
|
||||
importIdMap = new Map([...importIdMap, ...checkConflictsResult.importIdMap]);
|
||||
importStateMap = new Map([...importStateMap, ...checkConflictsResult.importStateMap]);
|
||||
pendingOverwrites = checkConflictsResult.pendingOverwrites;
|
||||
|
||||
// Check multi-namespace object types for origin conflicts in this namespace
|
||||
|
@ -106,11 +121,12 @@ export async function importSavedObjectsFromStream({
|
|||
typeRegistry,
|
||||
namespace,
|
||||
ignoreRegularConflicts: overwrite,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
pendingOverwrites,
|
||||
};
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(checkOriginConflictsParams);
|
||||
errorAccumulator = [...errorAccumulator, ...checkOriginConflictsResult.errors];
|
||||
importIdMap = new Map([...importIdMap, ...checkOriginConflictsResult.importIdMap]);
|
||||
importStateMap = new Map([...importStateMap, ...checkOriginConflictsResult.importStateMap]);
|
||||
pendingOverwrites = new Set([
|
||||
...pendingOverwrites,
|
||||
...checkOriginConflictsResult.pendingOverwrites,
|
||||
|
@ -122,7 +138,7 @@ export async function importSavedObjectsFromStream({
|
|||
objects: collectSavedObjectsResult.collectedObjects,
|
||||
accumulatedErrors: errorAccumulator,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
overwrite,
|
||||
namespace,
|
||||
};
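The rewritten flow above composes importStateMap by spreading Maps into a new Map, for example importStateMap = new Map([...importStateMap, ...checkReferenceOriginsResult.importStateMap]). Because the Map constructor keeps the last entry for a duplicate key, each later step can refine an entry produced by an earlier one, which is what the "checks reference origins" and "creates saved objects" tests assert. A standalone sketch of that merge behavior (the keys and values here are illustrative, not taken from this change):

type ImportStateValue = { destinationId?: string; isOnlyReference?: boolean };

const fromCollect = new Map<string, ImportStateValue>([
  ['dashboard:foo', {}], // object present in the import file
  ['index-pattern:bar', { isOnlyReference: true }], // only referenced, not present
]);
const fromCheckReferenceOrigins = new Map<string, ImportStateValue>([
  ['index-pattern:bar', { isOnlyReference: true, destinationId: 'baz' }],
]);

// Later entries win on duplicate keys, so the reference entry is refined in place.
const importStateMap = new Map([...fromCollect, ...fromCheckReferenceOrigins]);
// importStateMap.get('dashboard:foo')     -> {}
// importStateMap.get('index-pattern:bar') -> { isOnlyReference: true, destinationId: 'baz' }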
|
||||
|
|
|
@ -6,13 +6,16 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { mockUuidv4 } from './__mocks__';
|
||||
import { savedObjectsClientMock } from '../../../mocks';
|
||||
import { SavedObjectReference, SavedObjectsImportRetry } from 'kibana/public';
|
||||
import { SavedObjectsClientContract, SavedObject } from '../../types';
|
||||
import { SavedObjectsErrorHelpers } from '../../service';
|
||||
import { checkConflicts } from './check_conflicts';
|
||||
|
||||
jest.mock('uuid', () => ({
|
||||
v4: () => 'uuidv4',
|
||||
}));
|
||||
|
||||
type SavedObjectType = SavedObject<{ title?: string }>;
|
||||
type CheckConflictsParams = Parameters<typeof checkConflicts>[0];
|
||||
|
||||
|
@ -71,11 +74,6 @@ describe('#checkConflicts', () => {
|
|||
return { ...partial, savedObjectsClient };
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
mockUuidv4.mockReset();
|
||||
mockUuidv4.mockReturnValueOnce(`new-object-id`);
|
||||
});
|
||||
|
||||
it('exits early if there are no objects to check', async () => {
|
||||
const namespace = 'foo-namespace';
|
||||
const params = setupParams({ objects: [], namespace });
|
||||
|
@ -85,7 +83,7 @@ describe('#checkConflicts', () => {
|
|||
expect(checkConflictsResult).toEqual({
|
||||
filteredObjects: [],
|
||||
errors: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
});
|
||||
|
@ -121,7 +119,7 @@ describe('#checkConflicts', () => {
|
|||
error: { ...obj4Error.error, type: 'unknown' },
|
||||
},
|
||||
],
|
||||
importIdMap: new Map([[`${obj3.type}:${obj3.id}`, { id: `new-object-id` }]]),
|
||||
importStateMap: new Map([[`${obj3.type}:${obj3.id}`, { destinationId: 'uuidv4' }]]),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
});
|
||||
|
@ -187,14 +185,14 @@ describe('#checkConflicts', () => {
|
|||
error: { ...obj4Error.error, type: 'unknown' },
|
||||
},
|
||||
],
|
||||
importIdMap: new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { id: `new-object-id`, omitOriginId: true }],
|
||||
importStateMap: new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
]),
|
||||
pendingOverwrites: new Set([`${obj5.type}:${obj5.id}`]),
|
||||
});
|
||||
});
|
||||
|
||||
it('adds `omitOriginId` field to `importIdMap` entries when createNewCopies=true', async () => {
|
||||
it('adds `omitOriginId` field to `importStateMap` entries when createNewCopies=true', async () => {
|
||||
const namespace = 'foo-namespace';
|
||||
const params = setupParams({ objects, namespace, createNewCopies: true });
|
||||
socCheckConflicts.mockResolvedValue({ errors: [obj2Error, obj3Error, obj4Error] });
|
||||
|
@ -202,8 +200,8 @@ describe('#checkConflicts', () => {
|
|||
const checkConflictsResult = await checkConflicts(params);
|
||||
expect(checkConflictsResult).toEqual(
|
||||
expect.objectContaining({
|
||||
importIdMap: new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { id: `new-object-id`, omitOriginId: true }],
|
||||
importStateMap: new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
]),
|
||||
})
|
||||
);
|
||||
|
|
|
@ -14,6 +14,7 @@ import {
|
|||
SavedObjectError,
|
||||
SavedObjectsImportRetry,
|
||||
} from '../../types';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
interface CheckConflictsParams {
|
||||
objects: Array<SavedObject<{ title?: string }>>;
|
||||
|
@ -37,12 +38,12 @@ export async function checkConflicts({
|
|||
}: CheckConflictsParams) {
|
||||
const filteredObjects: Array<SavedObject<{ title?: string }>> = [];
|
||||
const errors: SavedObjectsImportFailure[] = [];
|
||||
const importIdMap = new Map<string, { id?: string; omitOriginId?: boolean }>();
|
||||
const importStateMap: ImportStateMap = new Map();
|
||||
const pendingOverwrites = new Set<string>();
|
||||
|
||||
// exit early if there are no objects to check
|
||||
if (objects.length === 0) {
|
||||
return { filteredObjects, errors, importIdMap, pendingOverwrites };
|
||||
return { filteredObjects, errors, importStateMap, pendingOverwrites };
|
||||
}
|
||||
|
||||
const retryMap = retries.reduce(
|
||||
|
@ -76,7 +77,7 @@ export async function checkConflicts({
|
|||
// This code path should not be triggered for a retry, but in case the consumer is using the import APIs incorrectly and attempting to
|
||||
// retry an object with a destinationId that would result in an unresolvable conflict, we regenerate the ID here as a fail-safe.
|
||||
const omitOriginId = createNewCopies || createNewCopy;
|
||||
importIdMap.set(`${type}:${id}`, { id: uuidv4(), omitOriginId });
|
||||
importStateMap.set(`${type}:${id}`, { destinationId: uuidv4(), omitOriginId });
|
||||
filteredObjects.push(object);
|
||||
} else if (errorObj && errorObj.statusCode !== 409) {
|
||||
errors.push({ type, id, title, meta: { title }, error: { ...errorObj, type: 'unknown' } });
|
||||
|
@ -90,5 +91,5 @@ export async function checkConflicts({
|
|||
}
|
||||
}
|
||||
});
|
||||
return { filteredObjects, errors, importIdMap, pendingOverwrites };
|
||||
return { filteredObjects, errors, importStateMap, pendingOverwrites };
|
||||
}
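checkConflicts now returns importStateMap (typed as ImportStateMap from ./types) in place of the old importIdMap, and entries carry destinationId rather than id. The ImportStateMap definition itself is not part of this diff; a hedged approximation of its shape, inferred from how entries are written and read across these files:

// Approximation only; the real definition lives in ./types and is not shown in this diff.
interface ImportStateValue {
  destinationId?: string; // the object should be created under this ID instead of its own
  omitOriginId?: boolean; // the destination object should not inherit the originId
  isOnlyReference?: boolean; // the key names a referenced object that is not in the import file
}

// Keys are `${type}:${id}` strings, matching how entries are set in checkConflicts above.
type ImportStateMap = Map<string, ImportStateValue>;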
|
||||
|
|
|
@ -6,9 +6,9 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
const mockUuidv4 = jest.fn().mockReturnValue('uuidv4');
|
||||
jest.mock('uuid', () => ({
|
||||
v4: mockUuidv4,
|
||||
}));
|
||||
import type { createOriginQuery } from './utils';
|
||||
|
||||
export { mockUuidv4 };
|
||||
export const mockCreateOriginQuery = jest.fn() as jest.MockedFunction<typeof createOriginQuery>;
|
||||
jest.mock('./utils', () => ({
|
||||
createOriginQuery: mockCreateOriginQuery,
|
||||
}));
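This mock file now stubs createOriginQuery from ./utils instead of uuid: the search-string construction that check_origin_conflicts.ts previously inlined (see the removed createQueryTerm/createQuery helpers near the end of this diff) has been extracted into that shared helper. Its implementation is not shown in this diff; a plausible sketch based on the removed helpers, with the (type, id) signature inferred from the updated expectFindArgs assertions below:

// Hypothetical reconstruction for illustration only; the real helper lives in ./utils.
const escapeQueryTerm = (input: string) => input.replace(/\\/g, '\\\\').replace(/"/g, '\\"');

export const createOriginQuery = (type: string, id: string) =>
  `"${escapeQueryTerm(`${type}:${id}`)}" | "${escapeQueryTerm(id)}"`;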
|
|
@ -6,18 +6,23 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { mockUuidv4 } from './__mocks__';
|
||||
import { mockCreateOriginQuery } from './check_reference_origins.test.mock';
|
||||
|
||||
import {
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectReference,
|
||||
SavedObject,
|
||||
SavedObjectsImportRetry,
|
||||
SavedObjectsImportFailure,
|
||||
} from '../../types';
|
||||
import { checkOriginConflicts, getImportIdMapForRetries } from './check_origin_conflicts';
|
||||
import { checkOriginConflicts } from './check_origin_conflicts';
|
||||
import { savedObjectsClientMock } from '../../../mocks';
|
||||
import { typeRegistryMock } from '../../saved_objects_type_registry.mock';
|
||||
import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
jest.mock('uuid', () => ({
|
||||
v4: () => 'uuidv4',
|
||||
}));
|
||||
|
||||
type SavedObjectType = SavedObject<{ title?: string }>;
|
||||
type CheckOriginConflictsParams = Parameters<typeof checkOriginConflicts>[0];
|
||||
|
@ -42,10 +47,6 @@ const createObject = (
|
|||
const MULTI_NS_TYPE = 'multi';
|
||||
const OTHER_TYPE = 'other';
|
||||
|
||||
beforeEach(() => {
|
||||
mockUuidv4.mockClear();
|
||||
});
|
||||
|
||||
describe('#checkOriginConflicts', () => {
|
||||
let savedObjectsClient: jest.Mocked<SavedObjectsClientContract>;
|
||||
let typeRegistry: jest.Mocked<ISavedObjectTypeRegistry>;
|
||||
|
@ -61,8 +62,9 @@ describe('#checkOriginConflicts', () => {
|
|||
const setupParams = (partial: {
|
||||
objects: SavedObjectType[];
|
||||
namespace?: string;
|
||||
importIdMap?: Map<string, unknown>;
|
||||
ignoreRegularConflicts?: boolean;
|
||||
importStateMap?: ImportStateMap;
|
||||
pendingOverwrites?: Set<string>;
|
||||
}): CheckOriginConflictsParams => {
|
||||
savedObjectsClient = savedObjectsClientMock.create();
|
||||
find = savedObjectsClient.find;
|
||||
|
@ -70,7 +72,8 @@ describe('#checkOriginConflicts', () => {
|
|||
typeRegistry = typeRegistryMock.create();
|
||||
typeRegistry.isMultiNamespace.mockImplementation((type) => type === MULTI_NS_TYPE);
|
||||
return {
|
||||
importIdMap: new Map<string, unknown>(), // empty by default
|
||||
importStateMap: new Map(), // empty by default
|
||||
pendingOverwrites: new Set<string>(), // empty by default
|
||||
...partial,
|
||||
savedObjectsClient,
|
||||
typeRegistry,
|
||||
|
@ -82,19 +85,21 @@ describe('#checkOriginConflicts', () => {
|
|||
};
|
||||
|
||||
describe('cluster calls', () => {
|
||||
beforeEach(() => {
|
||||
mockCreateOriginQuery.mockClear();
|
||||
});
|
||||
|
||||
const multiNsObj = createObject(MULTI_NS_TYPE, 'id-1');
|
||||
const multiNsObjWithOriginId = createObject(MULTI_NS_TYPE, 'id-2', 'originId-foo');
|
||||
const otherObj = createObject(OTHER_TYPE, 'id-3');
|
||||
// non-multi-namespace types shouldn't have origin IDs, but we include a test case to ensure it's handled gracefully
|
||||
const otherObjWithOriginId = createObject(OTHER_TYPE, 'id-4', 'originId-bar');
|
||||
|
||||
const expectFindArgs = (n: number, object: SavedObject, rawIdPrefix: string) => {
|
||||
const { type, id, originId } = object;
|
||||
const search = `"${rawIdPrefix}${type}:${originId || id}" | "${originId || id}"`; // this template works for our basic test cases
|
||||
const expectedArgs = expect.objectContaining({ type, search });
|
||||
// exclude rootSearchFields, page, perPage, and fields attributes from assertion -- these are constant
|
||||
const expectFindArgs = (n: number, object: SavedObject) => {
|
||||
const idToCheck = object.originId || object.id;
|
||||
expect(mockCreateOriginQuery).toHaveBeenNthCalledWith(n, object.type, idToCheck);
|
||||
// exclude namespace from assertion -- a separate test covers that
|
||||
expect(find).toHaveBeenNthCalledWith(n, expectedArgs);
|
||||
expect(find).toHaveBeenNthCalledWith(n, expect.objectContaining({ type: object.type }));
|
||||
};
|
||||
|
||||
test('does not execute searches for non-multi-namespace objects', async () => {
|
||||
|
@ -105,21 +110,26 @@ describe('#checkOriginConflicts', () => {
|
|||
expect(find).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('does not execute searches for multi-namespace objects that already have pending overwrites (exact match conflicts)', async () => {
|
||||
const objects = [multiNsObj, multiNsObjWithOriginId];
|
||||
const pendingOverwrites = new Set([
|
||||
`${multiNsObj.type}:${multiNsObj.id}`,
|
||||
`${multiNsObjWithOriginId.type}:${multiNsObjWithOriginId.id}`,
|
||||
]);
|
||||
const params = setupParams({ objects, pendingOverwrites });
|
||||
|
||||
await checkOriginConflicts(params);
|
||||
expect(find).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('executes searches for multi-namespace objects', async () => {
|
||||
const objects = [multiNsObj, otherObj, multiNsObjWithOriginId, otherObjWithOriginId];
|
||||
const params1 = setupParams({ objects });
|
||||
|
||||
await checkOriginConflicts(params1);
|
||||
expect(find).toHaveBeenCalledTimes(2);
|
||||
expectFindArgs(1, multiNsObj, '');
|
||||
expectFindArgs(2, multiNsObjWithOriginId, '');
|
||||
|
||||
find.mockClear();
|
||||
const params2 = setupParams({ objects, namespace: 'some-namespace' });
|
||||
await checkOriginConflicts(params2);
|
||||
expect(find).toHaveBeenCalledTimes(2);
|
||||
expectFindArgs(1, multiNsObj, 'some-namespace:');
|
||||
expectFindArgs(2, multiNsObjWithOriginId, 'some-namespace:');
|
||||
expectFindArgs(1, multiNsObj);
|
||||
expectFindArgs(2, multiNsObjWithOriginId);
|
||||
});
|
||||
|
||||
test('searches within the current `namespace`', async () => {
|
||||
|
@ -131,22 +141,6 @@ describe('#checkOriginConflicts', () => {
|
|||
expect(find).toHaveBeenCalledTimes(1);
|
||||
expect(find).toHaveBeenCalledWith(expect.objectContaining({ namespaces: [namespace] }));
|
||||
});
|
||||
|
||||
test('search query escapes quote and backslash characters in `id` and/or `originId`', async () => {
|
||||
const weirdId = `some"weird\\id`;
|
||||
const objects = [
|
||||
createObject(MULTI_NS_TYPE, weirdId),
|
||||
createObject(MULTI_NS_TYPE, 'some-id', weirdId),
|
||||
];
|
||||
const params = setupParams({ objects });
|
||||
|
||||
await checkOriginConflicts(params);
|
||||
const escapedId = `some\\"weird\\\\id`;
|
||||
const expectedQuery = `"${MULTI_NS_TYPE}:${escapedId}" | "${escapedId}"`;
|
||||
expect(find).toHaveBeenCalledTimes(2);
|
||||
expect(find).toHaveBeenNthCalledWith(1, expect.objectContaining({ search: expectedQuery }));
|
||||
expect(find).toHaveBeenNthCalledWith(2, expect.objectContaining({ search: expectedQuery }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('results', () => {
|
||||
|
@ -183,7 +177,35 @@ describe('#checkOriginConflicts', () => {
|
|||
},
|
||||
});
|
||||
|
||||
describe('object result without a `importIdMap` entry (no match or exact match)', () => {
|
||||
test('filters inexact matches of other objects that are being imported, but does not filter inexact matches of references that are not being imported', async () => {
|
||||
// obj1, obj2, and obj3 exist in this space, and obj1 has references to both obj2 and obj3
|
||||
// try to import obj1, obj2, and obj4; simulating a scenario where obj1 and obj2 were filtered out during `checkConflicts`, so we only call `checkOriginConflicts` with the remainder
|
||||
const obj1 = createObject(MULTI_NS_TYPE, 'id-1');
|
||||
const obj2 = createObject(MULTI_NS_TYPE, 'id-2', 'some-originId');
|
||||
const obj3 = createObject(MULTI_NS_TYPE, 'id-3', 'some-originId');
|
||||
const obj4 = createObject(MULTI_NS_TYPE, 'id-4', 'some-originId');
|
||||
const objects = [obj4];
|
||||
const params = setupParams({
|
||||
objects,
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, {}],
|
||||
[`${obj2.type}:${obj2.id}`, {}],
|
||||
[`${obj3.type}:${obj3.id}`, { isOnlyReference: true }], // this attribute signifies that there is a reference to this object, but it is not present in the collected objects from the import file
|
||||
[`${obj4.type}:${obj4.id}`, {}],
|
||||
]),
|
||||
});
|
||||
mockFindResult(obj2, obj3); // find for obj4: the result is an inexact match with two destinations, one of which is exactly matched by obj2 -- accordingly, obj4 has an inexact match to obj3
|
||||
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importStateMap: new Map(),
|
||||
errors: [createConflictError(obj4, obj3.id)],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
describe('object result without a `importStateMap` entry (no match or exact match)', () => {
|
||||
test('returns object when no match is detected (0 hits)', async () => {
|
||||
// no objects exist in this space
|
||||
// try to import obj1, obj2, obj3, and obj4
|
||||
|
@ -198,7 +220,7 @@ describe('#checkOriginConflicts', () => {
|
|||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
|
||||
const expectedResult = {
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
|
@ -215,7 +237,7 @@ describe('#checkOriginConflicts', () => {
|
|||
const objects = [obj2, obj4];
|
||||
const params = setupParams({
|
||||
objects,
|
||||
importIdMap: new Map([
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, {}],
|
||||
[`${obj2.type}:${obj2.id}`, {}],
|
||||
[`${obj3.type}:${obj3.id}`, {}],
|
||||
|
@ -227,7 +249,7 @@ describe('#checkOriginConflicts', () => {
|
|||
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
|
@ -243,7 +265,7 @@ describe('#checkOriginConflicts', () => {
|
|||
const objects = [obj3];
|
||||
const params = setupParams({
|
||||
objects,
|
||||
importIdMap: new Map([
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, {}],
|
||||
[`${obj2.type}:${obj2.id}`, {}],
|
||||
[`${obj3.type}:${obj3.id}`, {}],
|
||||
|
@ -253,7 +275,7 @@ describe('#checkOriginConflicts', () => {
|
|||
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
|
@ -261,7 +283,7 @@ describe('#checkOriginConflicts', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('object result with a `importIdMap` entry (partial match with a single destination)', () => {
|
||||
describe('object result with a `importStateMap` entry (partial match with a single destination)', () => {
|
||||
describe('when an inexact match is detected (1 hit)', () => {
|
||||
// objA and objB exist in this space
|
||||
// try to import obj1 and obj2
|
||||
|
@ -282,20 +304,20 @@ describe('#checkOriginConflicts', () => {
|
|||
const params = setup(false);
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
errors: [createConflictError(obj1, objA.id), createConflictError(obj2, objB.id)],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
test('returns object with a `importIdMap` entry when ignoreRegularConflicts=true', async () => {
|
||||
test('returns object with a `importStateMap` entry when ignoreRegularConflicts=true', async () => {
|
||||
const params = setup(true);
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, { id: objA.id }],
|
||||
[`${obj2.type}:${obj2.id}`, { id: objB.id }],
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, { destinationId: objA.id }],
|
||||
[`${obj2.type}:${obj2.id}`, { destinationId: objB.id }],
|
||||
]),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set([`${obj1.type}:${obj1.id}`, `${obj2.type}:${obj2.id}`]),
|
||||
|
@ -319,7 +341,7 @@ describe('#checkOriginConflicts', () => {
|
|||
const params = setupParams({
|
||||
objects,
|
||||
ignoreRegularConflicts,
|
||||
importIdMap: new Map([
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, {}],
|
||||
[`${obj2.type}:${obj2.id}`, {}],
|
||||
[`${obj3.type}:${obj3.id}`, {}],
|
||||
|
@ -335,20 +357,20 @@ describe('#checkOriginConflicts', () => {
|
|||
const params = setup(false);
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
errors: [createConflictError(obj2, objA.id), createConflictError(obj4, objB.id)],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
test('returns object with a `importIdMap` entry when ignoreRegularConflicts=true', async () => {
|
||||
test('returns object with a `importStateMap` entry when ignoreRegularConflicts=true', async () => {
|
||||
const params = setup(true);
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map([
|
||||
[`${obj2.type}:${obj2.id}`, { id: objA.id }],
|
||||
[`${obj4.type}:${obj4.id}`, { id: objB.id }],
|
||||
importStateMap: new Map([
|
||||
[`${obj2.type}:${obj2.id}`, { destinationId: objA.id }],
|
||||
[`${obj4.type}:${obj4.id}`, { destinationId: objB.id }],
|
||||
]),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set([`${obj2.type}:${obj2.id}`, `${obj4.type}:${obj4.id}`]),
|
||||
|
@ -359,7 +381,7 @@ describe('#checkOriginConflicts', () => {
|
|||
});
|
||||
|
||||
describe('ambiguous conflicts', () => {
|
||||
test('returns object with a `importIdMap` entry when multiple inexact matches are detected that target the same single destination', async () => {
|
||||
test('returns object with a `importStateMap` entry when multiple inexact matches are detected that target the same single destination', async () => {
|
||||
// objA and objB exist in this space
|
||||
// try to import obj1, obj2, obj3, and obj4
|
||||
const obj1 = createObject(MULTI_NS_TYPE, 'id-1');
|
||||
|
@ -377,16 +399,15 @@ describe('#checkOriginConflicts', () => {
|
|||
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj2.type}:${obj2.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj3.type}:${obj3.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj4.type}:${obj4.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj2.type}:${obj2.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj3.type}:${obj3.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj4.type}:${obj4.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
]),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(mockUuidv4).toHaveBeenCalledTimes(4);
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
|
@ -406,18 +427,17 @@ describe('#checkOriginConflicts', () => {
|
|||
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
errors: [
|
||||
createAmbiguousConflictError(obj1, [objB, objA]), // Assert that these have been sorted by updatedAt in descending order
|
||||
createAmbiguousConflictError(obj2, [objC, objD]), // Assert that these have been sorted by ID in ascending order (since their updatedAt values are the same)
|
||||
],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(mockUuidv4).not.toHaveBeenCalled();
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
test('returns object with a `importIdMap` entry when multiple inexact matches are detected that target the same multiple destinations', async () => {
|
||||
test('returns object with a `importStateMap` entry when multiple inexact matches are detected that target the same multiple destinations', async () => {
|
||||
// objA, objB, objC, and objD exist in this space
|
||||
// try to import obj1, obj2, obj3, and obj4
|
||||
const obj1 = createObject(MULTI_NS_TYPE, 'id-1');
|
||||
|
@ -437,16 +457,15 @@ describe('#checkOriginConflicts', () => {
|
|||
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj2.type}:${obj2.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj3.type}:${obj3.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj4.type}:${obj4.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
importStateMap: new Map([
|
||||
[`${obj1.type}:${obj1.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj2.type}:${obj2.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj3.type}:${obj3.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj4.type}:${obj4.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
]),
|
||||
errors: [],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(mockUuidv4).toHaveBeenCalledTimes(4);
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
});
|
||||
|
@ -470,10 +489,12 @@ describe('#checkOriginConflicts', () => {
|
|||
const objE = createObject(MULTI_NS_TYPE, 'id-E', obj7.id);
|
||||
const objects = [obj1, obj2, obj4, obj5, obj6, obj7, obj8];
|
||||
|
||||
const importIdMap = new Map([...objects, obj3].map(({ type, id }) => [`${type}:${id}`, {}]));
|
||||
const importStateMap = new Map(
|
||||
[...objects, obj3].map(({ type, id }) => [`${type}:${id}`, {}])
|
||||
);
|
||||
|
||||
const setup = (ignoreRegularConflicts: boolean) => {
|
||||
const params = setupParams({ objects, importIdMap, ignoreRegularConflicts });
|
||||
const params = setupParams({ objects, importStateMap, ignoreRegularConflicts });
|
||||
// obj1 is a non-multi-namespace type, so it is skipped while searching
|
||||
mockFindResult(); // find for obj2: the result is no match
|
||||
mockFindResult(obj3); // find for obj4: the result is an inexact match with one destination that is exactly matched by obj3 so it is ignored -- accordingly, obj4 has no match
|
||||
|
@ -488,9 +509,9 @@ describe('#checkOriginConflicts', () => {
|
|||
const params = setup(false);
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map([
|
||||
[`${obj7.type}:${obj7.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj8.type}:${obj8.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
importStateMap: new Map([
|
||||
[`${obj7.type}:${obj7.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj8.type}:${obj8.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
]),
|
||||
errors: [
|
||||
createConflictError(obj5, objA.id),
|
||||
|
@ -498,7 +519,6 @@ describe('#checkOriginConflicts', () => {
|
|||
],
|
||||
pendingOverwrites: new Set(),
|
||||
};
|
||||
expect(mockUuidv4).toHaveBeenCalledTimes(2);
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
|
@ -506,74 +526,16 @@ describe('#checkOriginConflicts', () => {
|
|||
const params = setup(true);
|
||||
const checkOriginConflictsResult = await checkOriginConflicts(params);
|
||||
const expectedResult = {
|
||||
importIdMap: new Map([
|
||||
[`${obj5.type}:${obj5.id}`, { id: objA.id }],
|
||||
[`${obj7.type}:${obj7.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj8.type}:${obj8.id}`, { id: 'uuidv4', omitOriginId: true }],
|
||||
importStateMap: new Map([
|
||||
[`${obj5.type}:${obj5.id}`, { destinationId: objA.id }],
|
||||
[`${obj7.type}:${obj7.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
[`${obj8.type}:${obj8.id}`, { destinationId: 'uuidv4', omitOriginId: true }],
|
||||
]),
|
||||
errors: [createAmbiguousConflictError(obj6, [objB, objC])],
|
||||
pendingOverwrites: new Set([`${obj5.type}:${obj5.id}`]),
|
||||
};
|
||||
expect(mockUuidv4).toHaveBeenCalledTimes(2);
|
||||
expect(checkOriginConflictsResult).toEqual(expectedResult);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getImportIdMapForRetries', () => {
|
||||
const createRetry = (
|
||||
{ type, id }: { type: string; id: string },
|
||||
params: { destinationId?: string; createNewCopy?: boolean } = {}
|
||||
): SavedObjectsImportRetry => {
|
||||
const { destinationId, createNewCopy } = params;
|
||||
return { type, id, overwrite: false, destinationId, replaceReferences: [], createNewCopy };
|
||||
};
|
||||
|
||||
test('throws an error if retry is not found for an object', async () => {
|
||||
const obj1 = createObject(MULTI_NS_TYPE, 'id-1');
|
||||
const obj2 = createObject(MULTI_NS_TYPE, 'id-2');
|
||||
const objects = [obj1, obj2];
|
||||
const retries = [createRetry(obj1)];
|
||||
const params = { objects, retries, createNewCopies: false };
|
||||
|
||||
expect(() => getImportIdMapForRetries(params)).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Retry was expected for \\"multi:id-2\\" but not found"`
|
||||
);
|
||||
});
|
||||
|
||||
test('returns expected results', async () => {
|
||||
const obj1 = createObject('type-1', 'id-1');
|
||||
const obj2 = createObject('type-2', 'id-2');
|
||||
const obj3 = createObject('type-3', 'id-3');
|
||||
const obj4 = createObject('type-4', 'id-4');
|
||||
const objects = [obj1, obj2, obj3, obj4];
|
||||
const retries = [
|
||||
createRetry(obj1), // retries that do not have `destinationId` specified are ignored
|
||||
createRetry(obj2, { destinationId: obj2.id }), // retries that have `id` that matches `destinationId` are ignored
|
||||
createRetry(obj3, { destinationId: 'id-X' }), // this retry will get added to the `importIdMap`!
|
||||
createRetry(obj4, { destinationId: 'id-Y', createNewCopy: true }), // this retry will get added to the `importIdMap`!
|
||||
];
|
||||
const params = { objects, retries, createNewCopies: false };
|
||||
|
||||
const checkOriginConflictsResult = await getImportIdMapForRetries(params);
|
||||
expect(checkOriginConflictsResult).toEqual(
|
||||
new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { id: 'id-X', omitOriginId: false }],
|
||||
[`${obj4.type}:${obj4.id}`, { id: 'id-Y', omitOriginId: true }],
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test('omits origin ID in `importIdMap` entries when createNewCopies=true', async () => {
|
||||
const obj = createObject('type-1', 'id-1');
|
||||
const objects = [obj];
|
||||
const retries = [createRetry(obj, { destinationId: 'id-X' })];
|
||||
const params = { objects, retries, createNewCopies: true };
|
||||
|
||||
const checkOriginConflictsResult = await getImportIdMapForRetries(params);
|
||||
expect(checkOriginConflictsResult).toEqual(
|
||||
new Map([[`${obj.type}:${obj.id}`, { id: 'id-X', omitOriginId: true }]])
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -8,13 +8,10 @@
|
|||
|
||||
import pMap from 'p-map';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import {
|
||||
SavedObject,
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsImportFailure,
|
||||
SavedObjectsImportRetry,
|
||||
} from '../../types';
|
||||
import { SavedObject, SavedObjectsClientContract, SavedObjectsImportFailure } from '../../types';
|
||||
import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry';
|
||||
import type { ImportStateMap } from './types';
|
||||
import { createOriginQuery } from './utils';
|
||||
|
||||
interface CheckOriginConflictsParams {
|
||||
objects: Array<SavedObject<{ title?: string }>>;
|
||||
|
@ -22,19 +19,15 @@ interface CheckOriginConflictsParams {
|
|||
typeRegistry: ISavedObjectTypeRegistry;
|
||||
namespace?: string;
|
||||
ignoreRegularConflicts?: boolean;
|
||||
importIdMap: Map<string, unknown>;
|
||||
importStateMap: ImportStateMap;
|
||||
pendingOverwrites: Set<string>;
|
||||
}
|
||||
|
||||
type CheckOriginConflictParams = Omit<CheckOriginConflictsParams, 'objects'> & {
|
||||
type CheckOriginConflictParams = Omit<CheckOriginConflictsParams, 'objects' | 'importIdMap'> & {
|
||||
object: SavedObject<{ title?: string }>;
|
||||
objectIdsBeingImported: Set<string>;
|
||||
};
|
||||
|
||||
interface GetImportIdMapForRetriesParams {
|
||||
objects: SavedObject[];
|
||||
retries: SavedObjectsImportRetry[];
|
||||
createNewCopies: boolean;
|
||||
}
|
||||
|
||||
interface InexactMatch<T> {
|
||||
object: SavedObject<T>;
|
||||
destinations: Array<{ id: string; title?: string; updatedAt?: string }>;
|
||||
|
@ -52,9 +45,6 @@ const isLeft = <T>(object: Either<T>): object is Left<T> => object.tag === 'left
|
|||
|
||||
const MAX_CONCURRENT_SEARCHES = 10;
|
||||
|
||||
const createQueryTerm = (input: string) => input.replace(/\\/g, '\\\\').replace(/\"/g, '\\"');
|
||||
const createQuery = (type: string, id: string, rawIdPrefix: string) =>
|
||||
`"${createQueryTerm(`${rawIdPrefix}${type}:${id}`)}" | "${createQueryTerm(id)}"`;
|
||||
const transformObjectsToAmbiguousConflictFields = (
|
||||
objects: Array<SavedObject<{ title?: string }>>
|
||||
) =>
|
||||
|
@ -81,25 +71,32 @@ const getAmbiguousConflictSourceKey = <T>({ object }: InexactMatch<T>) =>
|
|||
* specified namespace:
|
||||
* - A `Right` result indicates that no conflict destinations were found in this namespace ("no match").
|
||||
* - A `Left` result indicates that one or more conflict destinations exist in this namespace, none of which exactly match this object's ID
|
||||
* ("inexact match"). We can make this assumption because any "exact match" results would have been obtained and filtered out by the
|
||||
* `checkConflicts` submodule, which is called before this.
|
||||
* ("inexact match"). We can make this assumption because any "exact match" conflict errors would have been obtained and filtered out by
|
||||
* the `checkConflicts` submodule, which is called before this, *or* if `overwrite: true` is used, we explicitly filter out any pending
|
||||
* overwrites for exact matches.
|
||||
*/
|
||||
const checkOriginConflict = async (
|
||||
params: CheckOriginConflictParams
|
||||
): Promise<Either<{ title?: string }>> => {
|
||||
const { object, savedObjectsClient, typeRegistry, namespace, importIdMap } = params;
|
||||
const importIds = new Set(importIdMap.keys());
|
||||
const { type, originId } = object;
|
||||
const {
|
||||
object,
|
||||
savedObjectsClient,
|
||||
typeRegistry,
|
||||
namespace,
|
||||
objectIdsBeingImported,
|
||||
pendingOverwrites,
|
||||
} = params;
|
||||
const { type, originId, id } = object;
|
||||
|
||||
if (!typeRegistry.isMultiNamespace(type)) {
|
||||
if (!typeRegistry.isMultiNamespace(type) || pendingOverwrites.has(`${type}:${id}`)) {
|
||||
// Skip the search request for non-multi-namespace types, since by definition they cannot have inexact matches or ambiguous conflicts.
|
||||
// Also skip the search request for objects that we've already determined have an "exact match" conflict.
|
||||
return { tag: 'right', value: object };
|
||||
}
|
||||
|
||||
const search = createQuery(type, originId || object.id, namespace ? `${namespace}:` : '');
|
||||
const findOptions = {
|
||||
type,
|
||||
search,
|
||||
search: createOriginQuery(type, originId || id),
|
||||
rootSearchFields: ['_id', 'originId'],
|
||||
page: 1,
|
||||
perPage: 10,
|
||||
|
@ -114,7 +111,9 @@ const checkOriginConflict = async (
|
|||
return { tag: 'right', value: object };
|
||||
}
|
||||
// This is an "inexact match" so far; filter the conflict destination(s) to exclude any that exactly match other objects we are importing.
|
||||
const objects = savedObjects.filter((obj) => !importIds.has(`${obj.type}:${obj.id}`));
|
||||
const objects = savedObjects.filter(
|
||||
(obj) => !objectIdsBeingImported.has(`${obj.type}:${obj.id}`)
|
||||
);
|
||||
const destinations = transformObjectsToAmbiguousConflictFields(objects);
|
||||
if (destinations.length === 0) {
|
||||
// No conflict destinations remain after filtering, so this is a "no match" result.
|
||||
|
@ -137,14 +136,20 @@ const checkOriginConflict = async (
|
|||
* that match this object's `originId` or `id` exist in the specified namespace:
|
||||
* - If this is a `Right` result; return the import object and allow `createSavedObjects` to handle the conflict (if any).
|
||||
* - If this is a `Left` "partial match" result:
|
||||
* A. If there is a single source and destination match, add the destination to the importIdMap and return the import object, which
|
||||
* A. If there is a single source and destination match, add the destination to the importStateMap and return the import object, which
|
||||
* will allow `createSavedObjects` to modify the ID before creating the object (thus ensuring the conflict is handled during creation).
|
||||
* B. Otherwise, this is an "ambiguous conflict" result; return an error.
|
||||
*/
|
||||
export async function checkOriginConflicts({ objects, ...params }: CheckOriginConflictsParams) {
|
||||
const objectIdsBeingImported = new Set<string>();
|
||||
for (const [key, { isOnlyReference }] of params.importStateMap.entries()) {
|
||||
if (!isOnlyReference) {
|
||||
objectIdsBeingImported.add(key);
|
||||
}
|
||||
}
|
||||
// Check each object for possible destination conflicts, ensuring we don't have too many concurrent searches running.
|
||||
const mapper = async (object: SavedObject<{ title?: string }>) =>
|
||||
checkOriginConflict({ object, ...params });
|
||||
checkOriginConflict({ object, objectIdsBeingImported, ...params });
|
||||
const checkOriginConflictResults = await pMap(objects, mapper, {
|
||||
concurrency: MAX_CONCURRENT_SEARCHES,
|
||||
});
|
||||
|
@ -159,7 +164,7 @@ export async function checkOriginConflicts({ objects, ...params }: CheckOriginCo
|
|||
}, new Map<string, Array<SavedObject<{ title?: string }>>>());
|
||||
|
||||
const errors: SavedObjectsImportFailure[] = [];
|
||||
const importIdMap = new Map<string, { id: string; omitOriginId?: boolean }>();
|
||||
const importStateMap: ImportStateMap = new Map();
|
||||
const pendingOverwrites = new Set<string>();
|
||||
checkOriginConflictResults.forEach((result) => {
|
||||
if (!isLeft(result)) {
|
||||
|
@ -174,7 +179,7 @@ export async function checkOriginConflicts({ objects, ...params }: CheckOriginCo
|
|||
if (sources.length === 1 && destinations.length === 1) {
|
||||
// This is a simple "inexact match" result -- a single import object has a single destination conflict.
|
||||
if (params.ignoreRegularConflicts) {
|
||||
importIdMap.set(`${type}:${id}`, { id: destinations[0].id });
|
||||
importStateMap.set(`${type}:${id}`, { destinationId: destinations[0].id });
|
||||
pendingOverwrites.add(`${type}:${id}`);
|
||||
} else {
|
||||
const { title } = attributes;
|
||||
|
@ -198,7 +203,7 @@ export async function checkOriginConflicts({ objects, ...params }: CheckOriginCo
|
|||
if (sources.length > 1) {
|
||||
// In the case of ambiguous source conflicts, don't treat them as errors; instead, regenerate the object ID and reset its origin
|
||||
// (e.g., the same outcome as if `createNewCopies` was enabled for the entire import operation).
|
||||
importIdMap.set(`${type}:${id}`, { id: uuidv4(), omitOriginId: true });
|
||||
importStateMap.set(`${type}:${id}`, { destinationId: uuidv4(), omitOriginId: true });
|
||||
return;
|
||||
}
|
||||
const { title } = attributes;
|
||||
|
@ -214,32 +219,5 @@ export async function checkOriginConflicts({ objects, ...params }: CheckOriginCo
|
|||
});
|
||||
});
|
||||
|
||||
return { errors, importIdMap, pendingOverwrites };
|
||||
}
|
||||
|
||||
/**
|
||||
* Assume that all objects exist in the `retries` map (due to filtering at the beginning of `resolveSavedObjectsImportErrors`).
|
||||
*/
|
||||
export function getImportIdMapForRetries(params: GetImportIdMapForRetriesParams) {
|
||||
const { objects, retries, createNewCopies } = params;
|
||||
|
||||
const retryMap = retries.reduce(
|
||||
(acc, cur) => acc.set(`${cur.type}:${cur.id}`, cur),
|
||||
new Map<string, SavedObjectsImportRetry>()
|
||||
);
|
||||
const importIdMap = new Map<string, { id: string; omitOriginId?: boolean }>();
|
||||
|
||||
objects.forEach(({ type, id }) => {
|
||||
const retry = retryMap.get(`${type}:${id}`);
|
||||
if (!retry) {
|
||||
throw new Error(`Retry was expected for "${type}:${id}" but not found`);
|
||||
}
|
||||
const { destinationId } = retry;
|
||||
const omitOriginId = createNewCopies || Boolean(retry.createNewCopy);
|
||||
if (destinationId && destinationId !== id) {
|
||||
importIdMap.set(`${type}:${id}`, { id: destinationId, omitOriginId });
|
||||
}
|
||||
});
|
||||
|
||||
return importIdMap;
|
||||
return { errors, importStateMap, pendingOverwrites };
|
||||
}
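As a rough illustration of the flow described in the comments above (hypothetical IDs, not part of this change): when a single import object has exactly one origin-match destination and `ignoreRegularConflicts` (overwrite) is enabled, the object is redirected to the existing destination and marked as a pending overwrite.

// Hypothetical import object 'multi:orig-id' whose origin matches exactly one existing object
// with ID 'dest-id'. With ignoreRegularConflicts (overwrite) enabled, the result redirects the
// import to the existing destination and marks it as a pending overwrite:
const exampleResult = {
  errors: [],
  importStateMap: new Map([['multi:orig-id', { destinationId: 'dest-id' }]]),
  pendingOverwrites: new Set(['multi:orig-id']),
};
// Without ignoreRegularConflicts, the same situation yields a "conflict" error that carries
// destinationId 'dest-id' instead, and no importStateMap entry is added for that object.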
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { createOriginQuery } from './utils';
|
||||
|
||||
export const mockCreateOriginQuery = jest.fn() as jest.MockedFunction<typeof createOriginQuery>;
|
||||
jest.mock('./utils', () => ({
|
||||
createOriginQuery: mockCreateOriginQuery,
|
||||
}));
|
|
@ -0,0 +1,182 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { mockCreateOriginQuery } from './check_reference_origins.test.mock';
|
||||
|
||||
import type { SavedObjectsFindResult } from '../../service';
|
||||
import type { SavedObjectsClientContract } from '../../types';
|
||||
import { checkReferenceOrigins, CheckReferenceOriginsParams } from './check_reference_origins';
|
||||
import { savedObjectsClientMock } from '../../../mocks';
|
||||
import { typeRegistryMock } from '../../saved_objects_type_registry.mock';
|
||||
import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
const MULTI_NS_TYPE = 'multi';
|
||||
const OTHER_TYPE = 'other';
|
||||
|
||||
describe('checkReferenceOrigins', () => {
|
||||
let savedObjectsClient: jest.Mocked<SavedObjectsClientContract>;
|
||||
let typeRegistry: jest.Mocked<ISavedObjectTypeRegistry>;
|
||||
let find: typeof savedObjectsClient['find'];
|
||||
|
||||
const getResultMock = (...objectIds: string[]) => ({
|
||||
page: 1,
|
||||
per_page: 1,
|
||||
total: objectIds.length,
|
||||
saved_objects: objectIds.map((id) => ({ id, score: 0 } as unknown as SavedObjectsFindResult)),
|
||||
});
|
||||
|
||||
const setupParams = (partial: {
|
||||
namespace?: string;
|
||||
importStateMap: ImportStateMap;
|
||||
}): CheckReferenceOriginsParams => {
|
||||
savedObjectsClient = savedObjectsClientMock.create();
|
||||
find = savedObjectsClient.find;
|
||||
find.mockResolvedValue(getResultMock()); // mock zero hits response by default
|
||||
typeRegistry = typeRegistryMock.create();
|
||||
typeRegistry.isMultiNamespace.mockImplementation((type) => type === MULTI_NS_TYPE);
|
||||
return {
|
||||
...partial,
|
||||
savedObjectsClient,
|
||||
typeRegistry,
|
||||
};
|
||||
};
|
||||
|
||||
const mockFindResult = (...objectIds: string[]) => {
|
||||
// doesn't matter if the mocked result is a "realistic" object, it just needs an `id` field
|
||||
find.mockResolvedValueOnce(getResultMock(...objectIds));
|
||||
};
|
||||
|
||||
describe('cluster calls', () => {
|
||||
beforeEach(() => {
|
||||
mockCreateOriginQuery.mockClear();
|
||||
});
|
||||
|
||||
const expectFindArgs = (n: number, type: string, id: string) => {
|
||||
expect(mockCreateOriginQuery).toHaveBeenNthCalledWith(n, type, id);
|
||||
// exclude namespace from assertion -- a separate test covers that
|
||||
expect(find).toHaveBeenNthCalledWith(n, expect.objectContaining({ type }));
|
||||
};
|
||||
|
||||
test('does not execute searches for non-multi-namespace objects', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${OTHER_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
|
||||
await checkReferenceOrigins(params);
|
||||
expect(find).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('does not execute searches for multi-namespace objects without the isOnlyReference attribute', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, {}]]),
|
||||
});
|
||||
|
||||
await checkReferenceOrigins(params);
|
||||
expect(find).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('executes searches for multi-namespace objects with the isOnlyReference attribute', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
|
||||
await checkReferenceOrigins(params);
|
||||
expect(find).toHaveBeenCalledTimes(1);
|
||||
expectFindArgs(1, MULTI_NS_TYPE, '1');
|
||||
});
|
||||
|
||||
test('executes mixed searches', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([
|
||||
[`${MULTI_NS_TYPE}:1`, {}],
|
||||
[`${MULTI_NS_TYPE}:2`, { isOnlyReference: true }],
|
||||
[`${OTHER_TYPE}:3`, { isOnlyReference: true }],
|
||||
[`${MULTI_NS_TYPE}:4`, { isOnlyReference: true }],
|
||||
]),
|
||||
});
|
||||
|
||||
await checkReferenceOrigins(params);
|
||||
expect(find).toHaveBeenCalledTimes(2);
|
||||
expectFindArgs(1, MULTI_NS_TYPE, '2');
|
||||
expectFindArgs(2, MULTI_NS_TYPE, '4');
|
||||
});
|
||||
|
||||
test('searches within the current `namespace`', async () => {
|
||||
const namespace = 'some-namespace';
|
||||
const params = setupParams({
|
||||
namespace,
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
|
||||
await checkReferenceOrigins(params);
|
||||
expect(find).toHaveBeenCalledTimes(1);
|
||||
expect(find).toHaveBeenCalledWith(expect.objectContaining({ namespaces: [namespace] }));
|
||||
});
|
||||
});
|
||||
|
||||
describe('results', () => {
|
||||
test('does not return an entry if search resulted in 0 matches', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
// mock find returns an empty search result by default
|
||||
|
||||
const checkReferenceOriginsResult = await checkReferenceOrigins(params);
|
||||
|
||||
const expectedResult = {
|
||||
importStateMap: new Map(),
|
||||
};
|
||||
expect(checkReferenceOriginsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
test('does not return an entry if search resulted in 2+ matches', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
mockFindResult('2', '3');
|
||||
|
||||
const checkReferenceOriginsResult = await checkReferenceOrigins(params);
|
||||
|
||||
const expectedResult = {
|
||||
importStateMap: new Map(),
|
||||
};
|
||||
expect(checkReferenceOriginsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
test('does not return an entry if search resulted in 1 match with the same ID', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
mockFindResult('1');
|
||||
|
||||
const checkReferenceOriginsResult = await checkReferenceOrigins(params);
|
||||
|
||||
const expectedResult = {
|
||||
importStateMap: new Map(),
|
||||
};
|
||||
expect(checkReferenceOriginsResult).toEqual(expectedResult);
|
||||
});
|
||||
|
||||
test('returns an entry if search resulted in 1 match with a different ID', async () => {
|
||||
const params = setupParams({
|
||||
importStateMap: new Map([[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true }]]),
|
||||
});
|
||||
mockFindResult('2');
|
||||
|
||||
const checkReferenceOriginsResult = await checkReferenceOrigins(params);
|
||||
|
||||
const expectedResult = {
|
||||
importStateMap: new Map([
|
||||
[`${MULTI_NS_TYPE}:1`, { isOnlyReference: true, destinationId: '2' }],
|
||||
]),
|
||||
};
|
||||
expect(checkReferenceOriginsResult).toEqual(expectedResult);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,91 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import pMap from 'p-map';
|
||||
import { SavedObjectsClientContract } from '../../types';
|
||||
import { ISavedObjectTypeRegistry } from '../../saved_objects_type_registry';
|
||||
import type { ImportStateMap, ImportStateValue } from './types';
|
||||
import { getObjectKey, parseObjectKey } from '../../service/lib/internal_utils';
|
||||
import { createOriginQuery } from './utils';
|
||||
|
||||
export interface CheckReferenceOriginsParams {
|
||||
savedObjectsClient: SavedObjectsClientContract;
|
||||
typeRegistry: ISavedObjectTypeRegistry;
|
||||
namespace?: string;
|
||||
importStateMap: ImportStateMap;
|
||||
}
|
||||
|
||||
interface Reference {
|
||||
type: string;
|
||||
id: string;
|
||||
}
|
||||
|
||||
const MAX_CONCURRENT_SEARCHES = 10;
|
||||
|
||||
/**
|
||||
* Searches for any existing object(s) for the given reference; if there is exactly one object with a matching origin *and* its ID is
|
||||
* different than this reference ID, then this returns the different ID. Otherwise, it returns null.
|
||||
*/
|
||||
async function checkOrigin(
|
||||
type: string,
|
||||
id: string,
|
||||
savedObjectsClient: SavedObjectsClientContract,
|
||||
namespace: string | undefined
|
||||
) {
|
||||
const findOptions = {
|
||||
type,
|
||||
search: createOriginQuery(type, id),
|
||||
rootSearchFields: ['_id', 'originId'],
|
||||
page: 1,
|
||||
perPage: 1, // we only need one result for now
|
||||
fields: ['title'], // we don't actually need the object's title, we just specify one field so we don't fetch *all* fields
|
||||
sortField: 'updated_at',
|
||||
sortOrder: 'desc' as const,
|
||||
...(namespace && { namespaces: [namespace] }),
|
||||
};
|
||||
const findResult = await savedObjectsClient.find<{ title?: string }>(findOptions);
|
||||
const { total, saved_objects: savedObjects } = findResult;
|
||||
if (total === 1) {
|
||||
const [object] = savedObjects;
|
||||
if (id !== object.id) {
|
||||
return {
|
||||
key: getObjectKey({ type, id }),
|
||||
value: { isOnlyReference: true, destinationId: object.id } as ImportStateValue,
|
||||
};
|
||||
}
|
||||
}
|
||||
// TODO: if the total is 2+, return an "ambiguous reference origin match" to the consumer (#120313)
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function checkReferenceOrigins(params: CheckReferenceOriginsParams) {
|
||||
const { savedObjectsClient, namespace } = params;
|
||||
const referencesToCheck: Reference[] = [];
|
||||
for (const [key, { isOnlyReference }] of params.importStateMap.entries()) {
|
||||
const { type, id } = parseObjectKey(key);
|
||||
if (params.typeRegistry.isMultiNamespace(type) && isOnlyReference) {
|
||||
referencesToCheck.push({ type, id });
|
||||
}
|
||||
}
|
||||
// Check each object for possible destination conflicts, ensuring we don't have too many concurrent searches running.
|
||||
const mapper = async ({ type, id }: Reference) =>
|
||||
checkOrigin(type, id, savedObjectsClient, namespace);
|
||||
const checkOriginResults = await pMap(referencesToCheck, mapper, {
|
||||
concurrency: MAX_CONCURRENT_SEARCHES,
|
||||
});
|
||||
|
||||
const importStateMap: ImportStateMap = new Map();
|
||||
for (const result of checkOriginResults) {
|
||||
if (result) {
|
||||
const { key, value } = result;
|
||||
importStateMap.set(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
return { importStateMap };
|
||||
}
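As a quick illustration of the behavior above (hypothetical types and IDs): only multi-namespace entries flagged `isOnlyReference` trigger a search, and an entry is emitted only when exactly one object with a matching origin but a different ID is found.

import type { ImportStateMap } from './types';

// Input: 'dashboard:my-dash' is being imported (no isOnlyReference flag), so it is skipped;
// 'multi:ref-1' exists only as a reference, so its origin is checked with a find query.
const inputMap: ImportStateMap = new Map([
  ['dashboard:my-dash', {}],
  ['multi:ref-1', { isOnlyReference: true }],
]);
// If the origin search for 'multi:ref-1' finds exactly one object whose ID is 'abc123':
const outputMap: ImportStateMap = new Map([
  ['multi:ref-1', { isOnlyReference: true, destinationId: 'abc123' }],
]);
// Zero matches, two or more matches, or a single match with the same ID all yield no entry.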
|
|
@ -39,8 +39,18 @@ describe('collectSavedObjects()', () => {
|
|||
},
|
||||
});
|
||||
|
||||
const obj1 = { type: 'a', id: '1', attributes: { title: 'my title 1' } };
|
||||
const obj2 = { type: 'b', id: '2', attributes: { title: 'my title 2' } };
|
||||
const obj1 = {
|
||||
type: 'a',
|
||||
id: '1',
|
||||
attributes: { title: 'my title 1' },
|
||||
references: [{ type: 'b', id: '2', name: 'b2' }],
|
||||
};
|
||||
const obj2 = {
|
||||
type: 'b',
|
||||
id: '2',
|
||||
attributes: { title: 'my title 2' },
|
||||
references: [{ type: 'c', id: '3', name: 'c3' }],
|
||||
};
|
||||
|
||||
describe('module calls', () => {
|
||||
test('limit stream with empty input stream is called with null', async () => {
|
||||
|
@ -120,17 +130,24 @@ describe('collectSavedObjects()', () => {
|
|||
const readStream = createReadStream();
|
||||
const result = await collectSavedObjects({ readStream, supportedTypes: [], objectLimit });
|
||||
|
||||
expect(result).toEqual({ collectedObjects: [], errors: [], importIdMap: new Map() });
|
||||
expect(result).toEqual({ collectedObjects: [], errors: [], importStateMap: new Map() });
|
||||
});
|
||||
|
||||
test('collects objects from stream', async () => {
|
||||
const readStream = createReadStream(obj1);
|
||||
const supportedTypes = [obj1.type];
|
||||
const readStream = createReadStream(obj1, obj2);
|
||||
const supportedTypes = [obj1.type, obj2.type];
|
||||
const result = await collectSavedObjects({ readStream, supportedTypes, objectLimit });
|
||||
|
||||
const collectedObjects = [{ ...obj1, migrationVersion: {} }];
|
||||
const importIdMap = new Map([[`${obj1.type}:${obj1.id}`, {}]]);
|
||||
expect(result).toEqual({ collectedObjects, errors: [], importIdMap });
|
||||
const collectedObjects = [
|
||||
{ ...obj1, migrationVersion: {} },
|
||||
{ ...obj2, migrationVersion: {} },
|
||||
];
|
||||
const importStateMap = new Map([
|
||||
[`a:1`, {}], // a:1 is included because it is present in the collected objects
|
||||
[`b:2`, {}], // b:2 is included because it is present in the collected objects
|
||||
[`c:3`, { isOnlyReference: true }], // c:3 is included because b:2 has a reference to c:3, but this is marked as `isOnlyReference` because c:3 is not present in the collected objects
|
||||
]);
|
||||
expect(result).toEqual({ collectedObjects, errors: [], importStateMap });
|
||||
});
|
||||
|
||||
test('unsupported types return as import errors', async () => {
|
||||
|
@ -141,20 +158,24 @@ describe('collectSavedObjects()', () => {
|
|||
const error = { type: 'unsupported_type' };
|
||||
const { title } = obj1.attributes;
|
||||
const errors = [{ error, type: obj1.type, id: obj1.id, title, meta: { title } }];
|
||||
expect(result).toEqual({ collectedObjects: [], errors, importIdMap: new Map() });
|
||||
expect(result).toEqual({ collectedObjects: [], errors, importStateMap: new Map() });
|
||||
});
|
||||
|
||||
test('returns mixed results', async () => {
|
||||
const readStream = createReadStream(obj1, obj2);
|
||||
const supportedTypes = [obj2.type];
|
||||
const supportedTypes = [obj1.type];
|
||||
const result = await collectSavedObjects({ readStream, supportedTypes, objectLimit });
|
||||
|
||||
const collectedObjects = [{ ...obj2, migrationVersion: {} }];
|
||||
const importIdMap = new Map([[`${obj2.type}:${obj2.id}`, {}]]);
|
||||
const collectedObjects = [{ ...obj1, migrationVersion: {} }];
|
||||
const importStateMap = new Map([
|
||||
[`a:1`, {}], // a:1 is included because it is present in the collected objects
|
||||
[`b:2`, { isOnlyReference: true }], // b:2 was filtered out due to an unsupported type; b:2 is included because a:1 has a reference to b:2, but this is marked as `isOnlyReference` because b:2 is not present in the collected objects
|
||||
// c:3 is not included at all, because b:2 was filtered out and there are no other references to c:3
|
||||
]);
|
||||
const error = { type: 'unsupported_type' };
|
||||
const { title } = obj1.attributes;
|
||||
const errors = [{ error, type: obj1.type, id: obj1.id, title, meta: { title } }];
|
||||
expect(result).toEqual({ collectedObjects, errors, importIdMap });
|
||||
const { title } = obj2.attributes;
|
||||
const errors = [{ error, type: obj2.type, id: obj2.id, title, meta: { title } }];
|
||||
expect(result).toEqual({ collectedObjects, errors, importStateMap });
|
||||
});
|
||||
|
||||
describe('with optional filter', () => {
|
||||
|
@ -172,7 +193,7 @@ describe('collectSavedObjects()', () => {
|
|||
const error = { type: 'unsupported_type' };
|
||||
const { title } = obj1.attributes;
|
||||
const errors = [{ error, type: obj1.type, id: obj1.id, title, meta: { title } }];
|
||||
expect(result).toEqual({ collectedObjects: [], errors, importIdMap: new Map() });
|
||||
expect(result).toEqual({ collectedObjects: [], errors, importStateMap: new Map() });
|
||||
});
|
||||
|
||||
test('does not filter out objects when result === true', async () => {
|
||||
|
@ -187,11 +208,15 @@ describe('collectSavedObjects()', () => {
|
|||
});
|
||||
|
||||
const collectedObjects = [{ ...obj2, migrationVersion: {} }];
|
||||
const importIdMap = new Map([[`${obj2.type}:${obj2.id}`, {}]]);
|
||||
const importStateMap = new Map([
|
||||
// a:1 was filtered out due to an unsupported type; a:1 is not included because there are no other references to a:1
|
||||
[`b:2`, {}], // b:2 is included because it is present in the collected objects
|
||||
[`c:3`, { isOnlyReference: true }], // c:3 is included because b:2 has a reference to c:3, but this is marked as `isOnlyReference` because c:3 is not present in the collected objects
|
||||
]);
|
||||
const error = { type: 'unsupported_type' };
|
||||
const { title } = obj1.attributes;
|
||||
const errors = [{ error, type: obj1.type, id: obj1.id, title, meta: { title } }];
|
||||
expect(result).toEqual({ collectedObjects, errors, importIdMap });
|
||||
expect(result).toEqual({ collectedObjects, errors, importStateMap });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -19,6 +19,7 @@ import { SavedObjectsImportFailure } from '../types';
|
|||
import { SavedObjectsImportError } from '../errors';
|
||||
import { getNonUniqueEntries } from './get_non_unique_entries';
|
||||
import { createLimitStream } from './create_limit_stream';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
interface CollectSavedObjectsOptions {
|
||||
readStream: Readable;
|
||||
|
@ -35,7 +36,7 @@ export async function collectSavedObjects({
|
|||
}: CollectSavedObjectsOptions) {
|
||||
const errors: SavedObjectsImportFailure[] = [];
|
||||
const entries: Array<{ type: string; id: string }> = [];
|
||||
const importIdMap = new Map<string, { id?: string; omitOriginId?: boolean }>();
|
||||
const importStateMap: ImportStateMap = new Map();
|
||||
const collectedObjects: Array<SavedObject<{ title?: string }>> = await createPromiseFromStreams([
|
||||
readStream,
|
||||
createLimitStream(objectLimit),
|
||||
|
@ -58,7 +59,13 @@ export async function collectSavedObjects({
|
|||
}),
|
||||
createFilterStream<SavedObject>((obj) => (filter ? filter(obj) : true)),
|
||||
createMapStream((obj: SavedObject) => {
|
||||
importIdMap.set(`${obj.type}:${obj.id}`, {});
|
||||
importStateMap.set(`${obj.type}:${obj.id}`, {});
|
||||
for (const ref of obj.references ?? []) {
|
||||
const key = `${ref.type}:${ref.id}`;
|
||||
if (!importStateMap.has(key)) {
|
||||
importStateMap.set(key, { isOnlyReference: true });
|
||||
}
|
||||
}
|
||||
// Ensure migrations execute on every saved object
|
||||
return Object.assign({ migrationVersion: {} }, obj);
|
||||
}),
|
||||
|
@ -74,6 +81,6 @@ export async function collectSavedObjects({
|
|||
return {
|
||||
errors,
|
||||
collectedObjects,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
};
|
||||
}
|
||||
|
|
|
@ -23,8 +23,8 @@ const createObject = (type: string, id: string, originId?: string): SavedObject
|
|||
attributes: {},
|
||||
references: [
|
||||
{ name: 'name-1', type: 'other-type', id: 'other-id' }, // object that is not present
|
||||
{ name: 'name-2', type: MULTI_NS_TYPE, id: 'id-1' }, // object that is present, but does not have an importIdMap entry
|
||||
{ name: 'name-3', type: MULTI_NS_TYPE, id: 'id-3' }, // object that is present and has an importIdMap entry
|
||||
{ name: 'name-2', type: MULTI_NS_TYPE, id: 'id-1' }, // object that is present, but does not have an importStateMap entry
|
||||
{ name: 'name-3', type: MULTI_NS_TYPE, id: 'id-3' }, // object that is present and has an importStateMap entry
|
||||
],
|
||||
...(originId && { originId }),
|
||||
});
|
||||
|
@ -52,10 +52,10 @@ const obj13 = createObject(OTHER_TYPE, 'id-13'); // -> conflict
|
|||
const importId3 = 'id-foo';
|
||||
const importId4 = 'id-bar';
|
||||
const importId8 = 'id-baz';
|
||||
const importIdMap = new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { id: importId3, omitOriginId: true }],
|
||||
[`${obj4.type}:${obj4.id}`, { id: importId4 }],
|
||||
[`${obj8.type}:${obj8.id}`, { id: importId8 }],
|
||||
const importStateMap = new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { destinationId: importId3, omitOriginId: true }],
|
||||
[`${obj4.type}:${obj4.id}`, { destinationId: importId4 }],
|
||||
[`${obj8.type}:${obj8.id}`, { destinationId: importId8 }],
|
||||
]);
|
||||
|
||||
describe('#createSavedObjects', () => {
|
||||
|
@ -74,7 +74,7 @@ describe('#createSavedObjects', () => {
|
|||
}): CreateSavedObjectsParams => {
|
||||
savedObjectsClient = savedObjectsClientMock.create();
|
||||
bulkCreate = savedObjectsClient.bulkCreate;
|
||||
return { accumulatedErrors: [], ...partial, savedObjectsClient, importIdMap };
|
||||
return { accumulatedErrors: [], ...partial, savedObjectsClient, importStateMap };
|
||||
};
|
||||
|
||||
const getExpectedBulkCreateArgsObjects = (objects: SavedObject[], retry?: boolean) =>
|
||||
|
@ -84,8 +84,8 @@ describe('#createSavedObjects', () => {
|
|||
attributes,
|
||||
references: [
|
||||
{ name: 'name-1', type: 'other-type', id: 'other-id' }, // object that is not present
|
||||
{ name: 'name-2', type: MULTI_NS_TYPE, id: 'id-1' }, // object that is present, but does not have an importIdMap entry
|
||||
{ name: 'name-3', type: MULTI_NS_TYPE, id: 'id-foo' }, // object that is present and has an importIdMap entry
|
||||
{ name: 'name-2', type: MULTI_NS_TYPE, id: 'id-1' }, // object that is present, but does not have an importStateMap entry
|
||||
{ name: 'name-3', type: MULTI_NS_TYPE, id: 'id-foo' }, // object that is present and has an importStateMap entry
|
||||
],
|
||||
// if the import object had an originId, and/or if we regenerated the id, expect an originId to be included in the create args
|
||||
...((originId || retry) && { originId: originId || id }),
|
||||
|
@ -245,7 +245,7 @@ describe('#createSavedObjects', () => {
|
|||
|
||||
await createSavedObjects(options);
|
||||
expect(bulkCreate).toHaveBeenCalledTimes(1);
|
||||
// these three objects are transformed before being created, because they are included in the `importIdMap`
|
||||
// these three objects are transformed before being created, because they are included in the `importStateMap`
|
||||
const x3 = { ...obj3, id: importId3, originId: undefined }; // this import object already has an originId, but the entry has omitOriginId=true
|
||||
const x4 = { ...obj4, id: importId4 }; // this import object already has an originId
|
||||
const x8 = { ...obj8, id: importId8, originId: obj8.id }; // this import object doesn't have an originId, so it is set before create
|
||||
|
|
|
@ -9,16 +9,17 @@
|
|||
import { SavedObject, SavedObjectsClientContract, SavedObjectsImportFailure } from '../../types';
|
||||
import { extractErrors } from './extract_errors';
|
||||
import { CreatedObject } from '../types';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
interface CreateSavedObjectsParams<T> {
|
||||
export interface CreateSavedObjectsParams<T> {
|
||||
objects: Array<SavedObject<T>>;
|
||||
accumulatedErrors: SavedObjectsImportFailure[];
|
||||
savedObjectsClient: SavedObjectsClientContract;
|
||||
importIdMap: Map<string, { id?: string; omitOriginId?: boolean }>;
|
||||
importStateMap: ImportStateMap;
|
||||
namespace?: string;
|
||||
overwrite?: boolean;
|
||||
}
|
||||
interface CreateSavedObjectsResult<T> {
|
||||
export interface CreateSavedObjectsResult<T> {
|
||||
createdObjects: Array<CreatedObject<T>>;
|
||||
errors: SavedObjectsImportFailure[];
|
||||
}
|
||||
|
@ -31,7 +32,7 @@ export const createSavedObjects = async <T>({
|
|||
objects,
|
||||
accumulatedErrors,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
namespace,
|
||||
overwrite,
|
||||
}: CreateSavedObjectsParams<T>): Promise<CreateSavedObjectsResult<T>> => {
|
||||
|
@ -58,19 +59,24 @@ export const createSavedObjects = async <T>({
|
|||
// use the import ID map to ensure that each reference is being created with the correct ID
|
||||
const references = object.references?.map((reference) => {
|
||||
const { type, id } = reference;
|
||||
const importIdEntry = importIdMap.get(`${type}:${id}`);
|
||||
if (importIdEntry?.id) {
|
||||
return { ...reference, id: importIdEntry.id };
|
||||
const importStateValue = importStateMap.get(`${type}:${id}`);
|
||||
if (importStateValue?.destinationId) {
|
||||
return { ...reference, id: importStateValue.destinationId };
|
||||
}
|
||||
return reference;
|
||||
});
|
||||
// use the import ID map to ensure that each object is being created with the correct ID, also ensure that the `originId` is set on
|
||||
// the created object if it did not have one (or is omitted if specified)
|
||||
const importIdEntry = importIdMap.get(`${object.type}:${object.id}`);
|
||||
if (importIdEntry?.id) {
|
||||
objectIdMap.set(`${object.type}:${importIdEntry.id}`, object);
|
||||
const originId = importIdEntry.omitOriginId ? undefined : object.originId ?? object.id;
|
||||
return { ...object, id: importIdEntry.id, originId, ...(references && { references }) };
|
||||
const importStateValue = importStateMap.get(`${object.type}:${object.id}`);
|
||||
if (importStateValue?.destinationId) {
|
||||
objectIdMap.set(`${object.type}:${importStateValue.destinationId}`, object);
|
||||
const originId = importStateValue.omitOriginId ? undefined : object.originId ?? object.id;
|
||||
return {
|
||||
...object,
|
||||
id: importStateValue.destinationId,
|
||||
originId,
|
||||
...(references && { references }),
|
||||
};
|
||||
}
|
||||
return { ...object, ...(references && { references }) };
|
||||
});
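To make the reference remapping above concrete, here is a small sketch (hypothetical IDs, mirroring the test fixtures earlier in this change) of how a single reference is rewritten using the import state map before `bulkCreate` is called:

import type { ImportStateMap } from './types';

// 'multi:id-3' has an entry pointing at 'id-foo' (as in the test fixtures above), so a
// reference to it is rewritten before the objects are handed to bulkCreate.
const exampleStateMap: ImportStateMap = new Map([['multi:id-3', { destinationId: 'id-foo' }]]);
const reference = { name: 'name-3', type: 'multi', id: 'id-3' };
const entry = exampleStateMap.get(`${reference.type}:${reference.id}`);
const remapped = entry?.destinationId ? { ...reference, id: entry.destinationId } : reference;
// remapped -> { name: 'name-3', type: 'multi', id: 'id-foo' }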
|
||||
|
|
|
@ -9,7 +9,7 @@
|
|||
import { SavedObject } from '../../types';
|
||||
import { SavedObjectsImportHook, SavedObjectsImportWarning } from '../types';
|
||||
|
||||
interface ExecuteImportHooksOptions {
|
||||
export interface ExecuteImportHooksOptions {
|
||||
objects: SavedObject[];
|
||||
importHooks: Record<string, SavedObjectsImportHook[]>;
|
||||
}
|
||||
|
|
|
@ -0,0 +1,68 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { SavedObject } from '../../types';
|
||||
import type { SavedObjectsImportRetry } from '../types';
|
||||
import { getImportStateMapForRetries } from './get_import_state_map_for_retries';
|
||||
|
||||
describe('#getImportStateMapForRetries', () => {
|
||||
const createRetry = (
|
||||
{ type, id }: { type: string; id: string },
|
||||
params: { destinationId?: string; createNewCopy?: boolean } = {}
|
||||
): SavedObjectsImportRetry => {
|
||||
const { destinationId, createNewCopy } = params;
|
||||
return { type, id, overwrite: false, destinationId, replaceReferences: [], createNewCopy };
|
||||
};
|
||||
|
||||
test('throws an error if retry is not found for an object', async () => {
|
||||
const obj1 = { type: 'type-1', id: 'id-1' };
|
||||
const obj2 = { type: 'type-2', id: 'id-2' };
|
||||
const objects = [obj1, obj2] as SavedObject[];
|
||||
const retries = [createRetry(obj1)];
|
||||
const params = { objects, retries, createNewCopies: false };
|
||||
|
||||
expect(() => getImportStateMapForRetries(params)).toThrowErrorMatchingInlineSnapshot(
|
||||
`"Retry was expected for \\"type-2:id-2\\" but not found"`
|
||||
);
|
||||
});
|
||||
|
||||
test('returns expected results', async () => {
|
||||
const obj1 = { type: 'type-1', id: 'id-1' };
|
||||
const obj2 = { type: 'type-2', id: 'id-2' };
|
||||
const obj3 = { type: 'type-3', id: 'id-3' };
|
||||
const obj4 = { type: 'type-4', id: 'id-4' };
|
||||
const objects = [obj1, obj2, obj3, obj4] as SavedObject[];
|
||||
const retries = [
|
||||
createRetry(obj1), // retries that do not have `destinationId` specified are ignored
|
||||
createRetry(obj2, { destinationId: obj2.id }), // retries that have `id` that matches `destinationId` are ignored
|
||||
createRetry(obj3, { destinationId: 'id-X' }), // this retry will get added to the `importStateMap`!
|
||||
createRetry(obj4, { destinationId: 'id-Y', createNewCopy: true }), // this retry will get added to the `importStateMap`!
|
||||
];
|
||||
const params = { objects, retries, createNewCopies: false };
|
||||
|
||||
const result = await getImportStateMapForRetries(params);
|
||||
expect(result).toEqual(
|
||||
new Map([
|
||||
[`${obj3.type}:${obj3.id}`, { destinationId: 'id-X', omitOriginId: false }],
|
||||
[`${obj4.type}:${obj4.id}`, { destinationId: 'id-Y', omitOriginId: true }],
|
||||
])
|
||||
);
|
||||
});
|
||||
|
||||
test('omits origin ID in `importStateMap` entries when createNewCopies=true', async () => {
|
||||
const obj1 = { type: 'type-1', id: 'id-1' };
|
||||
const objects = [obj1] as SavedObject[];
|
||||
const retries = [createRetry(obj1, { destinationId: 'id-X' })];
|
||||
const params = { objects, retries, createNewCopies: true };
|
||||
|
||||
const result = await getImportStateMapForRetries(params);
|
||||
expect(result).toEqual(
|
||||
new Map([[`${obj1.type}:${obj1.id}`, { destinationId: 'id-X', omitOriginId: true }]])
|
||||
);
|
||||
});
|
||||
});
|
|
@ -0,0 +1,43 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { SavedObject, SavedObjectsImportRetry } from '../../types';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
interface GetImportStateMapForRetriesParams {
|
||||
objects: SavedObject[];
|
||||
retries: SavedObjectsImportRetry[];
|
||||
createNewCopies: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Assume that all objects exist in the `retries` map (due to filtering at the beginning of `resolveSavedObjectsImportErrors`).
|
||||
*/
|
||||
export function getImportStateMapForRetries(params: GetImportStateMapForRetriesParams) {
|
||||
const { objects, retries, createNewCopies } = params;
|
||||
|
||||
const retryMap = retries.reduce(
|
||||
(acc, cur) => acc.set(`${cur.type}:${cur.id}`, cur),
|
||||
new Map<string, SavedObjectsImportRetry>()
|
||||
);
|
||||
const importStateMap: ImportStateMap = new Map();
|
||||
|
||||
objects.forEach(({ type, id }) => {
|
||||
const retry = retryMap.get(`${type}:${id}`);
|
||||
if (!retry) {
|
||||
throw new Error(`Retry was expected for "${type}:${id}" but not found`);
|
||||
}
|
||||
const { destinationId } = retry;
|
||||
const omitOriginId = createNewCopies || Boolean(retry.createNewCopy);
|
||||
if (destinationId && destinationId !== id) {
|
||||
importStateMap.set(`${type}:${id}`, { destinationId, omitOriginId });
|
||||
}
|
||||
});
|
||||
|
||||
return importStateMap;
|
||||
}
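A brief usage sketch (hypothetical retry data, mirroring the unit test above): a retry whose `destinationId` differs from the object's ID produces an entry, and `createNewCopies` controls whether the origin ID is dropped.

import { SavedObject, SavedObjectsImportRetry } from '../../types';
import { getImportStateMapForRetries } from './get_import_state_map_for_retries';

const objects = [{ type: 'type-1', id: 'id-1' }] as SavedObject[];
const retries: SavedObjectsImportRetry[] = [
  { type: 'type-1', id: 'id-1', overwrite: false, destinationId: 'id-X', replaceReferences: [] },
];
const map = getImportStateMapForRetries({ objects, retries, createNewCopies: false });
// map -> Map { 'type-1:id-1' => { destinationId: 'id-X', omitOriginId: false } }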
|
|
@ -7,15 +7,18 @@
|
|||
*/
|
||||
|
||||
export { checkConflicts } from './check_conflicts';
|
||||
export { checkOriginConflicts, getImportIdMapForRetries } from './check_origin_conflicts';
|
||||
export { checkReferenceOrigins } from './check_reference_origins';
|
||||
export { checkOriginConflicts } from './check_origin_conflicts';
|
||||
export { collectSavedObjects } from './collect_saved_objects';
|
||||
export { createLimitStream } from './create_limit_stream';
|
||||
export { createObjectsFilter } from './create_objects_filter';
|
||||
export { createSavedObjects } from './create_saved_objects';
|
||||
export { extractErrors } from './extract_errors';
|
||||
export { getImportStateMapForRetries } from './get_import_state_map_for_retries';
|
||||
export { getNonUniqueEntries } from './get_non_unique_entries';
|
||||
export { regenerateIds } from './regenerate_ids';
|
||||
export { splitOverwrites } from './split_overwrites';
|
||||
export { getNonExistingReferenceAsKeys, validateReferences } from './validate_references';
|
||||
export { validateReferences } from './validate_references';
|
||||
export { validateRetries } from './validate_retries';
|
||||
export { executeImportHooks } from './execute_import_hooks';
|
||||
export type { ImportStateMap, ImportStateValue } from './types';
|
||||
|
|
|
@ -6,37 +6,31 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { mockUuidv4 } from './__mocks__';
|
||||
import { regenerateIds } from './regenerate_ids';
|
||||
import { SavedObject } from '../../types';
|
||||
|
||||
jest.mock('uuid', () => ({
|
||||
v4: jest
|
||||
.fn()
|
||||
.mockReturnValueOnce('uuidv4 #1')
|
||||
.mockReturnValueOnce('uuidv4 #2')
|
||||
.mockReturnValueOnce('uuidv4 #3'),
|
||||
}));
|
||||
|
||||
describe('#regenerateIds', () => {
|
||||
const objects = [
|
||||
{ type: 'foo', id: '1' },
|
||||
{ type: 'bar', id: '2' },
|
||||
{ type: 'baz', id: '3' },
|
||||
] as any as SavedObject[];
|
||||
] as SavedObject[];
|
||||
|
||||
test('returns expected values', () => {
|
||||
mockUuidv4
|
||||
.mockReturnValueOnce('uuidv4 #1')
|
||||
.mockReturnValueOnce('uuidv4 #2')
|
||||
.mockReturnValueOnce('uuidv4 #3');
|
||||
expect(regenerateIds(objects)).toMatchInlineSnapshot(`
|
||||
Map {
|
||||
"foo:1" => Object {
|
||||
"id": "uuidv4 #1",
|
||||
"omitOriginId": true,
|
||||
},
|
||||
"bar:2" => Object {
|
||||
"id": "uuidv4 #2",
|
||||
"omitOriginId": true,
|
||||
},
|
||||
"baz:3" => Object {
|
||||
"id": "uuidv4 #3",
|
||||
"omitOriginId": true,
|
||||
},
|
||||
}
|
||||
`);
|
||||
expect(regenerateIds(objects)).toEqual(
|
||||
new Map([
|
||||
['foo:1', { destinationId: 'uuidv4 #1', omitOriginId: true }],
|
||||
['bar:2', { destinationId: 'uuidv4 #2', omitOriginId: true }],
|
||||
['baz:3', { destinationId: 'uuidv4 #3', omitOriginId: true }],
|
||||
])
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -8,15 +8,17 @@
|
|||
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { SavedObject } from '../../types';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
/**
|
||||
* Takes an array of saved objects and returns an importIdMap of randomly-generated new IDs.
|
||||
* Takes an array of saved objects and returns an importStateMap of randomly-generated new IDs.
|
||||
*
|
||||
* @param objects The saved objects to generate new IDs for.
|
||||
*/
|
||||
export const regenerateIds = (objects: SavedObject[]) => {
|
||||
const importIdMap = objects.reduce((acc, object) => {
|
||||
return acc.set(`${object.type}:${object.id}`, { id: uuidv4(), omitOriginId: true });
|
||||
}, new Map<string, { id: string; omitOriginId?: boolean }>());
|
||||
return importIdMap;
|
||||
const importStateMap: ImportStateMap = new Map();
|
||||
for (const { type, id } of objects) {
|
||||
importStateMap.set(`${type}:${id}`, { destinationId: uuidv4(), omitOriginId: true });
|
||||
}
|
||||
return importStateMap;
|
||||
};
|
||||
|
|
src/core/server/saved_objects/import/lib/types.ts (new file, 35 lines)
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This map contains entries for objects that are included in the import operation. The entry key is the object's `type:id`, and the entry
|
||||
* value contains optional attributes which change how that object is created. The initial map that is created by the collectSavedObjects
|
||||
* module contains one entry with an empty value for each object that is being imported.
|
||||
*
|
||||
* This map is meant to function as a sort of accumulator; each module that is called during the import process can emit new entries that
|
||||
* will override those from the initial map.
|
||||
*/
|
||||
export type ImportStateMap = Map<string, ImportStateValue>;
|
||||
|
||||
/**
|
||||
* The value of an import state entry, which contains optional attributes that change how the object is created.
|
||||
*/
|
||||
export interface ImportStateValue {
|
||||
/**
|
||||
* This attribute indicates that the object for this entry is *only* a reference, it does not exist in the import file.
|
||||
*/
|
||||
isOnlyReference?: boolean;
|
||||
/**
|
||||
* This attribute indicates that the object should have this ID instead of what was specified in the import file.
|
||||
*/
|
||||
destinationId?: string;
|
||||
/**
|
||||
* This attribute indicates that the object's originId should be cleared.
|
||||
*/
|
||||
omitOriginId?: boolean;
|
||||
}
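Because the map works as an accumulator, a later step's entries take precedence over the initial ones. A minimal sketch of that layering follows (hypothetical merge helper, not part of this change):

import type { ImportStateMap } from './types';

// Hypothetical helper: later steps' entries take precedence over earlier ones, mirroring how
// the initial map from collectSavedObjects is refined by checkReferenceOrigins,
// regenerateIds / checkConflicts, and checkOriginConflicts.
function mergeImportState(initial: ImportStateMap, ...updates: ImportStateMap[]): ImportStateMap {
  const merged: ImportStateMap = new Map(initial);
  for (const update of updates) {
    for (const [key, value] of update.entries()) {
      merged.set(key, value); // a later entry overrides the initial (empty) entry for that key
    }
  }
  return merged;
}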
|
src/core/server/saved_objects/import/lib/utils.test.ts (new file, 26 lines)
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { createOriginQuery } from './utils';
|
||||
|
||||
describe('createOriginQuery', () => {
|
||||
it('returns expected simple query string', () => {
|
||||
const result = createOriginQuery('a', 'b');
|
||||
expect(result).toEqual('"a:b" | "b"');
|
||||
});
|
||||
|
||||
it('escapes double quotes', () => {
|
||||
const result = createOriginQuery('a"', 'b"');
|
||||
expect(result).toEqual('"a\\":b\\"" | "b\\""');
|
||||
});
|
||||
|
||||
it('escapes backslashes', () => {
|
||||
const result = createOriginQuery('a\\', 'b\\');
|
||||
expect(result).toEqual('"a\\\\:b\\\\" | "b\\\\"');
|
||||
});
|
||||
});
|
src/core/server/saved_objects/import/lib/utils.ts (new file, 26 lines)
|
@ -0,0 +1,26 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
function createOriginQueryTerm(input: string) {
|
||||
return input.replace(/\\/g, '\\\\').replace(/\"/g, '\\"');
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Constructs a simple query string for an object that will match any existing objects with the same origin.
|
||||
* This matches based on the object's raw document ID (_id) or the object's originId.
|
||||
*
|
||||
* @param type a saved object type
|
||||
* @param id a saved object ID to check; this should be the object's originId if present, otherwise it should be the object's ID
|
||||
* @returns a simple query string
|
||||
*/
|
||||
export function createOriginQuery(type: string, id: string) {
|
||||
// 1st query term will match raw object IDs (_id), 2nd query term will match originId
|
||||
// we intentionally do not include a namespace prefix for the raw object IDs, because this search is only for multi-namespace object types
|
||||
return `"${createOriginQueryTerm(`${type}:${id}`)}" | "${createOriginQueryTerm(id)}"`;
|
||||
}
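For context, this is how the query is typically consumed (a sketch based on the `checkOrigin` call sites above); the two query terms match either the raw document `_id` or the `originId`:

import { createOriginQuery } from './utils';

// createOriginQuery('multi', 'some-origin') yields '"multi:some-origin" | "some-origin"';
// the first term matches raw document IDs (_id) and the second matches originId.
const findOptions = {
  type: 'multi',
  search: createOriginQuery('multi', 'some-origin'),
  rootSearchFields: ['_id', 'originId'],
  page: 1,
  perPage: 1,
};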
|
|
@ -6,549 +6,251 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { getNonExistingReferenceAsKeys, validateReferences } from './validate_references';
|
||||
import type { ValidateReferencesParams } from './validate_references';
|
||||
import { validateReferences } from './validate_references';
|
||||
import { savedObjectsClientMock } from '../../../mocks';
|
||||
import { SavedObjectsErrorHelpers } from '../../service';
|
||||
|
||||
describe('getNonExistingReferenceAsKeys()', () => {
|
||||
function setup({
|
||||
objects = [],
|
||||
namespace,
|
||||
importStateMap = new Map(),
|
||||
retries,
|
||||
}: Partial<Omit<ValidateReferencesParams, 'savedObjectsClient'>> = {}) {
|
||||
const savedObjectsClient = savedObjectsClientMock.create();
|
||||
return { objects, savedObjectsClient, namespace, importStateMap, retries };
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
test('returns empty response when no objects exist', async () => {
|
||||
const result = await getNonExistingReferenceAsKeys([], savedObjectsClient);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test('skips objects when ignoreMissingReferences is included in retry', async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
];
|
||||
const retries = [
|
||||
{
|
||||
type: 'visualization',
|
||||
id: '2',
|
||||
overwrite: false,
|
||||
replaceReferences: [],
|
||||
ignoreMissingReferences: true,
|
||||
},
|
||||
];
|
||||
const result = await getNonExistingReferenceAsKeys(
|
||||
savedObjects,
|
||||
savedObjectsClient,
|
||||
undefined,
|
||||
retries
|
||||
);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('removes references that exist within savedObjects', async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '1',
|
||||
type: 'index-pattern',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const result = await getNonExistingReferenceAsKeys(savedObjects, savedObjectsClient);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test('removes references that exist within es', async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
savedObjectsClient.bulkGet.mockResolvedValueOnce({
|
||||
saved_objects: [
|
||||
{
|
||||
id: '1',
|
||||
type: 'index-pattern',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
],
|
||||
});
|
||||
const result = await getNonExistingReferenceAsKeys(savedObjects, savedObjectsClient);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
|
||||
[MockFunction] {
|
||||
"calls": Array [
|
||||
Array [
|
||||
Array [
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "1",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
],
|
||||
Object {
|
||||
"namespace": undefined,
|
||||
},
|
||||
],
|
||||
],
|
||||
"results": Array [
|
||||
Object {
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
||||
test(`doesn't handle saved object types outside of ENFORCED_TYPES`, async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'foo',
|
||||
id: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const result = await getNonExistingReferenceAsKeys(savedObjects, savedObjectsClient);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test('returns references within ENFORCED_TYPES when they are missing', async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '1',
|
||||
},
|
||||
{
|
||||
name: 'ref_1',
|
||||
type: 'search',
|
||||
id: '3',
|
||||
},
|
||||
{
|
||||
name: 'ref_2',
|
||||
type: 'foo',
|
||||
id: '4',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
savedObjectsClient.bulkGet.mockResolvedValueOnce({
|
||||
saved_objects: [
|
||||
{
|
||||
id: '1',
|
||||
type: 'index-pattern',
|
||||
error: SavedObjectsErrorHelpers.createGenericNotFoundError('index-pattern', '1').output
|
||||
.payload,
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '3',
|
||||
type: 'search',
|
||||
error: SavedObjectsErrorHelpers.createGenericNotFoundError('search', '3').output.payload,
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
],
|
||||
});
|
||||
const result = await getNonExistingReferenceAsKeys(savedObjects, savedObjectsClient);
|
||||
expect(result).toEqual(['index-pattern:1', 'search:3']);
|
||||
expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
|
||||
[MockFunction] {
|
||||
"calls": Array [
|
||||
Array [
|
||||
Array [
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "1",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "3",
|
||||
"type": "search",
|
||||
},
|
||||
],
|
||||
Object {
|
||||
"namespace": undefined,
|
||||
},
|
||||
],
|
||||
],
|
||||
"results": Array [
|
||||
Object {
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
});
|
||||
});
|
||||
function createNotFoundError({ type, id }: { type: string; id: string }) {
|
||||
const error = SavedObjectsErrorHelpers.createGenericNotFoundError(type, id).output.payload;
|
||||
return { type, id, error, attributes: {}, references: [] };
|
||||
}
|
||||
|
||||
describe('validateReferences()', () => {
|
||||
const savedObjectsClient = savedObjectsClientMock.create();
|
||||
test('does not call cluster and returns empty when no objects are passed in', async () => {
|
||||
const params = setup();
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
});
|
||||
|
||||
test('returns empty when no objects are passed in', async () => {
|
||||
const result = await validateReferences([], savedObjectsClient);
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(0);
|
||||
expect(params.savedObjectsClient.bulkGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('returns errors when references are missing', async () => {
|
||||
savedObjectsClient.bulkGet.mockResolvedValue({
|
||||
saved_objects: [
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
type: 'index-pattern',
|
||||
id: '3',
|
||||
error: SavedObjectsErrorHelpers.createGenericNotFoundError('index-pattern', '3').output
|
||||
.payload,
|
||||
id: '1',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
type: 'index-pattern',
|
||||
id: '5',
|
||||
error: SavedObjectsErrorHelpers.createGenericNotFoundError('index-pattern', '5').output
|
||||
.payload,
|
||||
attributes: {},
|
||||
references: [],
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: { title: 'My Visualization 2' },
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '3' }],
|
||||
},
|
||||
{
|
||||
type: 'index-pattern',
|
||||
id: '6',
|
||||
error: SavedObjectsErrorHelpers.createGenericNotFoundError('index-pattern', '6').output
|
||||
.payload,
|
||||
id: '4',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
type: 'search',
|
||||
id: '7',
|
||||
error: SavedObjectsErrorHelpers.createGenericNotFoundError('search', '7').output.payload,
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '8',
|
||||
type: 'search',
|
||||
attributes: {},
|
||||
references: [],
|
||||
references: [
|
||||
{ name: 'ref_0', type: 'index-pattern', id: '5' },
|
||||
{ name: 'ref_1', type: 'index-pattern', id: '6' },
|
||||
{ name: 'ref_2', type: 'search', id: '7' },
|
||||
{ name: 'ref_3', type: 'search', id: '8' },
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '1',
|
||||
params.savedObjectsClient.bulkGet.mockResolvedValue({
|
||||
saved_objects: [
|
||||
createNotFoundError({ type: 'index-pattern', id: '3' }),
|
||||
createNotFoundError({ type: 'index-pattern', id: '5' }),
|
||||
createNotFoundError({ type: 'index-pattern', id: '6' }),
|
||||
createNotFoundError({ type: 'search', id: '7' }),
|
||||
{ id: '8', type: 'search', attributes: {}, references: [] },
|
||||
],
|
||||
});
|
||||
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([
|
||||
expect.objectContaining({
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {
|
||||
title: 'My Visualization 2',
|
||||
error: {
|
||||
type: 'missing_references',
|
||||
references: [{ type: 'index-pattern', id: '3' }],
|
||||
},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '3',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
}),
|
||||
expect.objectContaining({
|
||||
type: 'visualization',
|
||||
id: '4',
|
||||
type: 'visualization',
|
||||
attributes: {
|
||||
title: 'My Visualization 4',
|
||||
},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '5',
|
||||
},
|
||||
{
|
||||
name: 'ref_1',
|
||||
type: 'index-pattern',
|
||||
id: '6',
|
||||
},
|
||||
{
|
||||
name: 'ref_2',
|
||||
type: 'search',
|
||||
id: '7',
|
||||
},
|
||||
{
|
||||
name: 'ref_3',
|
||||
type: 'search',
|
||||
id: '8',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const result = await validateReferences(savedObjects, savedObjectsClient);
|
||||
expect(result).toMatchInlineSnapshot(`
|
||||
Array [
|
||||
Object {
|
||||
"error": Object {
|
||||
"references": Array [
|
||||
Object {
|
||||
"id": "3",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
],
|
||||
"type": "missing_references",
|
||||
},
|
||||
"id": "2",
|
||||
"meta": Object {
|
||||
"title": "My Visualization 2",
|
||||
},
|
||||
"title": "My Visualization 2",
|
||||
"type": "visualization",
|
||||
},
|
||||
Object {
|
||||
"error": Object {
|
||||
"references": Array [
|
||||
Object {
|
||||
"id": "5",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
Object {
|
||||
"id": "6",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
Object {
|
||||
"id": "7",
|
||||
"type": "search",
|
||||
},
|
||||
],
|
||||
"type": "missing_references",
|
||||
},
|
||||
"id": "4",
|
||||
"meta": Object {
|
||||
"title": "My Visualization 4",
|
||||
},
|
||||
"title": "My Visualization 4",
|
||||
"type": "visualization",
|
||||
},
|
||||
]
|
||||
`);
|
||||
expect(savedObjectsClient.bulkGet).toMatchInlineSnapshot(`
|
||||
[MockFunction] {
|
||||
"calls": Array [
|
||||
Array [
|
||||
Array [
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "3",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "5",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "6",
|
||||
"type": "index-pattern",
|
||||
},
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "7",
|
||||
"type": "search",
|
||||
},
|
||||
Object {
|
||||
"fields": Array [
|
||||
"id",
|
||||
],
|
||||
"id": "8",
|
||||
"type": "search",
|
||||
},
|
||||
],
|
||||
Object {
|
||||
"namespace": undefined,
|
||||
},
|
||||
error: {
|
||||
type: 'missing_references',
|
||||
references: [
|
||||
{ type: 'index-pattern', id: '5' },
|
||||
{ type: 'index-pattern', id: '6' },
|
||||
{ type: 'search', id: '7' },
|
||||
],
|
||||
],
|
||||
"results": Array [
|
||||
Object {
|
||||
"type": "return",
|
||||
"value": Promise {},
|
||||
},
|
||||
],
|
||||
}
|
||||
`);
|
||||
},
|
||||
}),
|
||||
]);
|
||||
expect(params.savedObjectsClient.bulkGet).toHaveBeenCalledTimes(1);
|
||||
expect(params.savedObjectsClient.bulkGet).toHaveBeenCalledWith(
|
||||
[
|
||||
{ type: 'index-pattern', id: '3', fields: ['id'] },
|
||||
{ type: 'index-pattern', id: '5', fields: ['id'] },
|
||||
{ type: 'index-pattern', id: '6', fields: ['id'] },
|
||||
{ type: 'search', id: '7', fields: ['id'] },
|
||||
{ type: 'search', id: '8', fields: ['id'] },
|
||||
],
|
||||
{ namespace: undefined }
|
||||
);
|
||||
});
|
||||
|
||||
test(`doesn't return errors when ignoreMissingReferences is included in retry`, async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
];
|
||||
const retries = [
|
||||
{
|
||||
type: 'visualization',
|
||||
id: '2',
|
||||
overwrite: false,
|
||||
replaceReferences: [],
|
||||
ignoreMissingReferences: true,
|
||||
},
|
||||
];
|
||||
const result = await validateReferences(savedObjects, savedObjectsClient, undefined, retries);
|
||||
test(`skips checking references when ignoreMissingReferences is included in retry`, async () => {
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
],
|
||||
retries: [
|
||||
{
|
||||
type: 'visualization',
|
||||
id: '2',
|
||||
overwrite: false,
|
||||
replaceReferences: [],
|
||||
ignoreMissingReferences: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).not.toHaveBeenCalled();
|
||||
expect(params.savedObjectsClient.bulkGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test(`doesn't return errors when references exist in Elasticsearch`, async () => {
|
||||
savedObjectsClient.bulkGet.mockResolvedValue({
|
||||
saved_objects: [
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
],
|
||||
});
|
||||
params.savedObjectsClient.bulkGet.mockResolvedValue({
|
||||
saved_objects: [{ id: '1', type: 'index-pattern', attributes: {}, references: [] }],
|
||||
});
|
||||
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([]);
|
||||
expect(params.savedObjectsClient.bulkGet).toHaveBeenCalledTimes(1);
|
||||
expect(params.savedObjectsClient.bulkGet).toHaveBeenCalledWith(
|
||||
[{ type: 'index-pattern', id: '1', fields: ['id'] }],
|
||||
{ namespace: undefined }
|
||||
);
|
||||
});
|
||||
|
||||
test(`skips checking references that exist within the saved objects`, async () => {
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
id: '1',
|
||||
type: 'index-pattern',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
],
|
||||
});
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const result = await validateReferences(savedObjects, savedObjectsClient);
|
||||
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(1);
|
||||
expect(result).toEqual([]);
|
||||
expect(params.savedObjectsClient.bulkGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test(`doesn't return errors when references exist within the saved objects`, async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '1',
|
||||
type: 'index-pattern',
|
||||
attributes: {},
|
||||
references: [],
|
||||
},
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const result = await validateReferences(savedObjects, savedObjectsClient);
|
||||
test(`skips checking references that are not part of ENFORCED_TYPES`, async () => {
|
||||
// this test case intentionally includes a mix of references that *will* be checked, and references that *won't* be checked
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{ name: 'ref_0', type: 'index-pattern', id: '1' },
|
||||
{ name: 'ref_2', type: 'foo', id: '2' },
|
||||
{ name: 'ref_1', type: 'search', id: '3' },
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
params.savedObjectsClient.bulkGet.mockResolvedValueOnce({
|
||||
saved_objects: [
|
||||
{ type: 'index-pattern', id: '1', attributes: {}, references: [] },
|
||||
{ type: 'search', id: '3', attributes: {}, references: [] },
|
||||
],
|
||||
});
|
||||
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(0);
|
||||
expect(params.savedObjectsClient.bulkGet).toHaveBeenCalledTimes(1);
|
||||
expect(params.savedObjectsClient.bulkGet).toHaveBeenCalledWith(
|
||||
[
|
||||
{ type: 'index-pattern', id: '1', fields: ['id'] },
|
||||
// foo:2 is not included in the cluster call
|
||||
{ type: 'search', id: '3', fields: ['id'] },
|
||||
],
|
||||
{ namespace: undefined }
|
||||
);
|
||||
});
|
||||
|
||||
test(`doesn't validate references on types not part of ENFORCED_TYPES`, async () => {
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '1',
|
||||
type: 'dashboard',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'visualization',
|
||||
id: '2',
|
||||
},
|
||||
{
|
||||
name: 'ref_1',
|
||||
type: 'other-type',
|
||||
id: '3',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
const result = await validateReferences(savedObjects, savedObjectsClient);
|
||||
test('skips checking references when an importStateMap entry indicates that we have already found an origin match with a different ID', async () => {
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
],
|
||||
importStateMap: new Map([
|
||||
[`index-pattern:1`, { isOnlyReference: true, destinationId: 'not-1' }],
|
||||
]),
|
||||
});
|
||||
|
||||
const result = await validateReferences(params);
|
||||
expect(result).toEqual([]);
|
||||
expect(savedObjectsClient.bulkGet).toHaveBeenCalledTimes(0);
|
||||
expect(params.savedObjectsClient.bulkGet).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('throws when bulkGet fails', async () => {
|
||||
savedObjectsClient.bulkGet.mockResolvedValue({
|
||||
test('throws when bulkGet encounters an unexpected error', async () => {
|
||||
const params = setup({
|
||||
objects: [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [{ name: 'ref_0', type: 'index-pattern', id: '1' }],
|
||||
},
|
||||
],
|
||||
});
|
||||
params.savedObjectsClient.bulkGet.mockResolvedValue({
|
||||
saved_objects: [
|
||||
{
|
||||
id: '1',
|
||||
|
@ -559,24 +261,9 @@ describe('validateReferences()', () => {
|
|||
},
|
||||
],
|
||||
});
|
||||
const savedObjects = [
|
||||
{
|
||||
id: '2',
|
||||
type: 'visualization',
|
||||
attributes: {},
|
||||
references: [
|
||||
{
|
||||
name: 'ref_0',
|
||||
type: 'index-pattern',
|
||||
id: '1',
|
||||
},
|
||||
],
|
||||
},
|
||||
];
|
||||
await expect(
|
||||
validateReferences(savedObjects, savedObjectsClient)
|
||||
).rejects.toThrowErrorMatchingInlineSnapshot(
|
||||
`"Error fetching references for imported objects"`
|
||||
|
||||
await expect(() => validateReferences(params)).rejects.toThrowError(
|
||||
'Error fetching references for imported objects'
|
||||
);
|
||||
});
|
||||
});
|
||||
|
|
|
@ -9,6 +9,7 @@
|
|||
import { SavedObject, SavedObjectsClientContract } from '../../types';
|
||||
import { SavedObjectsImportFailure, SavedObjectsImportRetry } from '../types';
|
||||
import { SavedObjectsImportError } from '../errors';
|
||||
import type { ImportStateMap } from './types';
|
||||
|
||||
const REF_TYPES_TO_VALIDATE = ['index-pattern', 'search'];
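// Editorial note (not part of this commit): only references of the two types above are
// validated; references to any other type (e.g. the `foo` references exercised in the tests)
// are filtered out before the bulkGet call. The `filterReferencesToValidate` helper used below
// is defined elsewhere in this file and is not shown in this diff; a minimal sketch of what
// such a predicate could look like, under that assumption:
const filterReferencesToValidateSketch = (reference: { type: string }) =>
  REF_TYPES_TO_VALIDATE.includes(reference.type);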
|
||||
|
||||
|
@ -22,29 +23,44 @@ const getObjectsToSkip = (retries: SavedObjectsImportRetry[] = []) =>
|
|||
new Set<string>()
|
||||
);
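// Editorial sketch (assumption, not the committed implementation): judging from the signature
// in the hunk header above and from how the result is consumed (`objectsToSkip.has(`${type}:${id}`)`),
// the elided reducer body most likely collects `type:id` keys for retries that set
// `ignoreMissingReferences`, roughly:
const getObjectsToSkipSketch = (retries: SavedObjectsImportRetry[] = []) =>
  retries.reduce(
    (acc, { type, id, ignoreMissingReferences }) =>
      ignoreMissingReferences ? acc.add(`${type}:${id}`) : acc,
    new Set<string>()
  );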
|
||||
|
||||
export async function getNonExistingReferenceAsKeys(
|
||||
savedObjects: SavedObject[],
|
||||
savedObjectsClient: SavedObjectsClientContract,
|
||||
namespace?: string,
|
||||
retries?: SavedObjectsImportRetry[]
|
||||
) {
|
||||
export interface ValidateReferencesParams {
|
||||
objects: Array<SavedObject<{ title?: string }>>;
|
||||
savedObjectsClient: SavedObjectsClientContract;
|
||||
namespace: string | undefined;
|
||||
importStateMap: ImportStateMap;
|
||||
retries?: SavedObjectsImportRetry[];
|
||||
}
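// Editorial note (illustrative only): with this refactor, callers pass a single params object
// rather than positional arguments, e.g.
//
//   const errors = await validateReferences({
//     objects: collectedObjects,
//     savedObjectsClient,
//     namespace,
//     importStateMap,
//     retries,
//   });
//
// as seen in resolve_import_errors.ts further down in this diff.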
|
||||
|
||||
async function getNonExistingReferenceAsKeys({
|
||||
objects,
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
importStateMap,
|
||||
retries,
|
||||
}: ValidateReferencesParams) {
|
||||
const objectsToSkip = getObjectsToSkip(retries);
|
||||
const collector = new Map();
|
||||
// Collect all references within objects
|
||||
for (const savedObject of savedObjects) {
|
||||
if (objectsToSkip.has(`${savedObject.type}:${savedObject.id}`)) {
|
||||
// skip objects with retries that have specified `ignoreMissingReferences`
|
||||
for (const object of objects) {
|
||||
if (objectsToSkip.has(`${object.type}:${object.id}`)) {
|
||||
// skip objects with retries that have specified `ignoreMissingReferences`, or that share an origin with an existing object that has a different ID
|
||||
continue;
|
||||
}
|
||||
const filteredReferences = (savedObject.references || []).filter(filterReferencesToValidate);
|
||||
const filteredReferences = (object.references || []).filter(filterReferencesToValidate);
|
||||
for (const { type, id } of filteredReferences) {
|
||||
const key = `${type}:${id}`;
|
||||
const { isOnlyReference, destinationId } = importStateMap.get(key) ?? {};
|
||||
if (isOnlyReference && destinationId) {
|
||||
// We previously searched for this reference and found one with a matching origin, so skip validating it
|
||||
continue;
|
||||
}
|
||||
collector.set(`${type}:${id}`, { type, id });
|
||||
}
|
||||
}
|
||||
|
||||
// Remove objects that could be references
|
||||
for (const savedObject of savedObjects) {
|
||||
collector.delete(`${savedObject.type}:${savedObject.id}`);
|
||||
for (const object of objects) {
|
||||
collector.delete(`${object.type}:${object.id}`);
|
||||
}
|
||||
if (collector.size === 0) {
|
||||
return [];
|
||||
|
@ -73,23 +89,14 @@ export async function getNonExistingReferenceAsKeys(
|
|||
return [...collector.keys()];
|
||||
}
|
||||
|
||||
export async function validateReferences(
|
||||
savedObjects: Array<SavedObject<{ title?: string }>>,
|
||||
savedObjectsClient: SavedObjectsClientContract,
|
||||
namespace?: string,
|
||||
retries?: SavedObjectsImportRetry[]
|
||||
) {
|
||||
export async function validateReferences(params: ValidateReferencesParams) {
|
||||
const { objects, retries } = params;
|
||||
const objectsToSkip = getObjectsToSkip(retries);
|
||||
const errorMap: { [key: string]: SavedObjectsImportFailure } = {};
|
||||
const nonExistingReferenceKeys = await getNonExistingReferenceAsKeys(
|
||||
savedObjects,
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
retries
|
||||
);
|
||||
const nonExistingReferenceKeys = await getNonExistingReferenceAsKeys(params);
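// Editorial note: each key has the form `${type}:${id}` (e.g. 'index-pattern:3'). The loop
// below (partially elided in this diff) matches every object's enforced-type references against
// these keys; a hedged sketch of that per-object check:
//
//   const missingReferences = (references ?? [])
//     .filter(filterReferencesToValidate)
//     .filter(({ type, id }) => nonExistingReferenceKeys.includes(`${type}:${id}`))
//     .map(({ type, id }) => ({ type, id }));
//
// Objects with a non-empty list of missing references end up in the error map with a
// `missing_references` error, as the inline snapshots in the tests above show.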
|
||||
|
||||
// Filter out objects with missing references, add to error object
|
||||
savedObjects.forEach(({ type, id, references, attributes }) => {
|
||||
objects.forEach(({ type, id, references, attributes }) => {
|
||||
if (objectsToSkip.has(`${type}:${id}`)) {
|
||||
// skip objects with retries that have specified `ignoreMissingReferences`
|
||||
return;
|
||||
|
|
|
@ -0,0 +1,78 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import type { checkReferenceOrigins } from './lib/check_reference_origins';
|
||||
import type { validateRetries } from './lib/validate_retries';
|
||||
import type { createObjectsFilter } from './lib/create_objects_filter';
|
||||
import type { collectSavedObjects } from './lib/collect_saved_objects';
|
||||
import type { regenerateIds } from './lib/regenerate_ids';
|
||||
import type { validateReferences } from './lib/validate_references';
|
||||
import type { checkConflicts } from './lib/check_conflicts';
|
||||
import type { getImportStateMapForRetries } from './lib/get_import_state_map_for_retries';
|
||||
import type { splitOverwrites } from './lib/split_overwrites';
|
||||
import type { createSavedObjects } from './lib/create_saved_objects';
|
||||
import type { executeImportHooks } from './lib/execute_import_hooks';
|
||||
|
||||
export const mockCheckReferenceOrigins = jest.fn() as jest.MockedFunction<
|
||||
typeof checkReferenceOrigins
|
||||
>;
|
||||
jest.mock('./lib/check_reference_origins', () => ({
|
||||
checkReferenceOrigins: mockCheckReferenceOrigins,
|
||||
}));
|
||||
|
||||
export const mockValidateRetries = jest.fn() as jest.MockedFunction<typeof validateRetries>;
|
||||
jest.mock('./lib/validate_retries', () => ({
|
||||
validateRetries: mockValidateRetries,
|
||||
}));
|
||||
|
||||
export const mockCreateObjectsFilter = jest.fn() as jest.MockedFunction<typeof createObjectsFilter>;
|
||||
jest.mock('./lib/create_objects_filter', () => ({
|
||||
createObjectsFilter: mockCreateObjectsFilter,
|
||||
}));
|
||||
|
||||
export const mockCollectSavedObjects = jest.fn() as jest.MockedFunction<typeof collectSavedObjects>;
|
||||
jest.mock('./lib/collect_saved_objects', () => ({
|
||||
collectSavedObjects: mockCollectSavedObjects,
|
||||
}));
|
||||
|
||||
export const mockRegenerateIds = jest.fn() as jest.MockedFunction<typeof regenerateIds>;
|
||||
jest.mock('./lib/regenerate_ids', () => ({
|
||||
regenerateIds: mockRegenerateIds,
|
||||
}));
|
||||
|
||||
export const mockValidateReferences = jest.fn() as jest.MockedFunction<typeof validateReferences>;
|
||||
jest.mock('./lib/validate_references', () => ({
|
||||
validateReferences: mockValidateReferences,
|
||||
}));
|
||||
|
||||
export const mockCheckConflicts = jest.fn() as jest.MockedFunction<typeof checkConflicts>;
|
||||
jest.mock('./lib/check_conflicts', () => ({
|
||||
checkConflicts: mockCheckConflicts,
|
||||
}));
|
||||
|
||||
export const mockGetImportStateMapForRetries = jest.fn() as jest.MockedFunction<
|
||||
typeof getImportStateMapForRetries
|
||||
>;
|
||||
jest.mock('./lib/get_import_state_map_for_retries', () => ({
|
||||
getImportStateMapForRetries: mockGetImportStateMapForRetries,
|
||||
}));
|
||||
|
||||
export const mockSplitOverwrites = jest.fn() as jest.MockedFunction<typeof splitOverwrites>;
|
||||
jest.mock('./lib/split_overwrites', () => ({
|
||||
splitOverwrites: mockSplitOverwrites,
|
||||
}));
|
||||
|
||||
export const mockCreateSavedObjects = jest.fn() as jest.MockedFunction<typeof createSavedObjects>;
|
||||
jest.mock('./lib/create_saved_objects', () => ({
|
||||
createSavedObjects: mockCreateSavedObjects,
|
||||
}));
|
||||
|
||||
export const mockExecuteImportHooks = jest.fn() as jest.MockedFunction<typeof executeImportHooks>;
|
||||
jest.mock('./lib/execute_import_hooks', () => ({
|
||||
executeImportHooks: mockExecuteImportHooks,
|
||||
}));
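// Editorial note (not part of the commit): these mocks live in a standalone
// `resolve_import_errors.test.mock.ts` module, presumably so the test file can import them
// before it imports `./resolve_import_errors`; that ordering ensures each `jest.mock()` factory
// is registered before the module under test loads its `./lib` dependencies, and the
// `jest.MockedFunction<typeof fn>` casts keep the exported mocks' signatures in sync with the
// real implementations.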
|
|
@ -6,6 +6,20 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import {
|
||||
mockCheckReferenceOrigins,
|
||||
mockValidateRetries,
|
||||
mockCreateObjectsFilter,
|
||||
mockCollectSavedObjects,
|
||||
mockRegenerateIds,
|
||||
mockValidateReferences,
|
||||
mockCheckConflicts,
|
||||
mockGetImportStateMapForRetries,
|
||||
mockSplitOverwrites,
|
||||
mockCreateSavedObjects,
|
||||
mockExecuteImportHooks,
|
||||
} from './resolve_import_errors.test.mock';
|
||||
|
||||
import { Readable } from 'stream';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import {
|
||||
|
@ -25,58 +39,32 @@ import {
|
|||
ResolveSavedObjectsImportErrorsOptions,
|
||||
} from './resolve_import_errors';
|
||||
|
||||
import {
|
||||
validateRetries,
|
||||
collectSavedObjects,
|
||||
regenerateIds,
|
||||
validateReferences,
|
||||
checkConflicts,
|
||||
getImportIdMapForRetries,
|
||||
splitOverwrites,
|
||||
createSavedObjects,
|
||||
createObjectsFilter,
|
||||
executeImportHooks,
|
||||
} from './lib';
|
||||
|
||||
jest.mock('./lib/validate_retries');
|
||||
jest.mock('./lib/create_objects_filter');
|
||||
jest.mock('./lib/collect_saved_objects');
|
||||
jest.mock('./lib/regenerate_ids');
|
||||
jest.mock('./lib/validate_references');
|
||||
jest.mock('./lib/check_conflicts');
|
||||
jest.mock('./lib/check_origin_conflicts');
|
||||
jest.mock('./lib/split_overwrites');
|
||||
jest.mock('./lib/create_saved_objects');
|
||||
jest.mock('./lib/execute_import_hooks');
|
||||
|
||||
const getMockFn = <T extends (...args: any[]) => any, U>(fn: (...args: Parameters<T>) => U) =>
|
||||
fn as jest.MockedFunction<(...args: Parameters<T>) => U>;
|
||||
|
||||
describe('#importSavedObjectsFromStream', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
// mock empty output of each of these mocked modules so the import doesn't throw an error
|
||||
getMockFn(createObjectsFilter).mockReturnValue(() => false);
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCreateObjectsFilter.mockReturnValue(() => false);
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
getMockFn(regenerateIds).mockReturnValue(new Map());
|
||||
getMockFn(validateReferences).mockResolvedValue([]);
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckReferenceOrigins.mockResolvedValue({ importStateMap: new Map() });
|
||||
mockRegenerateIds.mockReturnValue(new Map());
|
||||
mockValidateReferences.mockResolvedValue([]);
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(), // not used by resolveImportErrors, but is a required return type
|
||||
});
|
||||
getMockFn(getImportIdMapForRetries).mockReturnValue(new Map());
|
||||
getMockFn(splitOverwrites).mockReturnValue({
|
||||
mockGetImportStateMapForRetries.mockReturnValue(new Map());
|
||||
mockSplitOverwrites.mockReturnValue({
|
||||
objectsToOverwrite: [],
|
||||
objectsToNotOverwrite: [],
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValue({ errors: [], createdObjects: [] });
|
||||
getMockFn(executeImportHooks).mockResolvedValue([]);
|
||||
mockCreateSavedObjects.mockResolvedValue({ errors: [], createdObjects: [] });
|
||||
mockExecuteImportHooks.mockResolvedValue([]);
|
||||
});
|
||||
|
||||
let readStream: Readable;
|
||||
|
@ -153,7 +141,7 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
/**
|
||||
* These tests use minimal mocks which don't look realistic, but are sufficient to exercise the code paths correctly. For example, for an
|
||||
* object to be imported successfully it would need to be obtained from `collectSavedObjects`, passed to `validateReferences`, passed to
|
||||
* `getImportIdMapForRetries`, passed to `createSavedObjects`, and returned from that. However, for each of the tests below, we skip the
|
||||
* `getImportStateMapForRetries`, passed to `createSavedObjects`, and returned from that. However, for each of the tests below, we skip the
|
||||
* intermediate steps in the interest of brevity.
|
||||
*/
|
||||
describe('module calls', () => {
|
||||
|
@ -162,7 +150,7 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const options = setupOptions({ retries: [retry] });
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(validateRetries).toHaveBeenCalledWith([retry]);
|
||||
expect(mockValidateRetries).toHaveBeenCalledWith([retry]);
|
||||
});
|
||||
|
||||
test('creates objects filter', async () => {
|
||||
|
@ -170,7 +158,7 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const options = setupOptions({ retries: [retry] });
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(createObjectsFilter).toHaveBeenCalledWith([retry]);
|
||||
expect(mockCreateObjectsFilter).toHaveBeenCalledWith([retry]);
|
||||
});
|
||||
|
||||
test('collects saved objects from stream', async () => {
|
||||
|
@ -182,28 +170,62 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(typeRegistry.getImportableAndExportableTypes).toHaveBeenCalled();
|
||||
const filter = getMockFn(createObjectsFilter).mock.results[0].value;
|
||||
const collectSavedObjectsOptions = { readStream, objectLimit, filter, supportedTypes };
|
||||
expect(collectSavedObjects).toHaveBeenCalledWith(collectSavedObjectsOptions);
|
||||
const filter = mockCreateObjectsFilter.mock.results[0].value;
|
||||
const mockCollectSavedObjectsOptions = { readStream, objectLimit, filter, supportedTypes };
|
||||
expect(mockCollectSavedObjects).toHaveBeenCalledWith(mockCollectSavedObjectsOptions);
|
||||
});
|
||||
|
||||
test('checks reference origins', async () => {
|
||||
const retries = [createRetry()];
|
||||
const options = setupOptions({ retries });
|
||||
const collectedObjects = [createObject()];
|
||||
const importStateMap = new Map([
|
||||
[`${collectedObjects[0].type}:${collectedObjects[0].id}`, {}],
|
||||
[`foo:bar`, { isOnlyReference: true }],
|
||||
]);
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importStateMap,
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(mockCheckReferenceOrigins).toHaveBeenCalledWith({
|
||||
savedObjectsClient,
|
||||
typeRegistry,
|
||||
namespace,
|
||||
importStateMap,
|
||||
});
|
||||
});
|
||||
|
||||
test('validates references', async () => {
|
||||
const retries = [createRetry()];
|
||||
const options = setupOptions({ retries });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map([
|
||||
[`${collectedObjects[0].type}:${collectedObjects[0].id}`, {}],
|
||||
[`foo:bar`, { isOnlyReference: true }],
|
||||
]),
|
||||
});
|
||||
mockCheckReferenceOrigins.mockResolvedValue({
|
||||
importStateMap: new Map([[`foo:bar`, { isOnlyReference: true, id: 'baz' }]]),
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(validateReferences).toHaveBeenCalledWith(
|
||||
collectedObjects,
|
||||
expect(mockValidateReferences).toHaveBeenCalledWith({
|
||||
objects: collectedObjects,
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
retries
|
||||
);
|
||||
importStateMap: new Map([
|
||||
// This importStateMap is a combination of the other two
|
||||
[`${collectedObjects[0].type}:${collectedObjects[0].id}`, {}],
|
||||
[`foo:bar`, { isOnlyReference: true, id: 'baz' }],
|
||||
]),
|
||||
retries,
|
||||
});
|
||||
});
|
||||
|
||||
test('execute import hooks', async () => {
|
||||
|
@ -212,19 +234,19 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
};
|
||||
const options = setupOptions({ importHooks });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [],
|
||||
createdObjects: collectedObjects,
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
|
||||
expect(executeImportHooks).toHaveBeenCalledWith({
|
||||
expect(mockExecuteImportHooks).toHaveBeenCalledWith({
|
||||
objects: collectedObjects,
|
||||
importHooks,
|
||||
});
|
||||
|
@ -239,23 +261,25 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
}),
|
||||
];
|
||||
const options = setupOptions({ retries });
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects: [object],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
// mockCheckReferenceOrigins returns an empty importStateMap by default
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
const objectWithReplacedReferences = {
|
||||
...object,
|
||||
references: [{ ...object.references[0], id: 'def' }],
|
||||
};
|
||||
expect(validateReferences).toHaveBeenCalledWith(
|
||||
[objectWithReplacedReferences],
|
||||
expect(mockValidateReferences).toHaveBeenCalledWith({
|
||||
objects: [objectWithReplacedReferences],
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
retries
|
||||
);
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
retries,
|
||||
});
|
||||
});
|
||||
|
||||
test('checks conflicts', async () => {
|
||||
|
@ -263,10 +287,10 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const retries = [createRetry()];
|
||||
const options = setupOptions({ retries, createNewCopies });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
|
@ -277,7 +301,7 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
retries,
|
||||
createNewCopies,
|
||||
};
|
||||
expect(checkConflicts).toHaveBeenCalledWith(checkConflictsParams);
|
||||
expect(mockCheckConflicts).toHaveBeenCalledWith(checkConflictsParams);
|
||||
});
|
||||
|
||||
test('gets import ID map for retries', async () => {
|
||||
|
@ -285,76 +309,82 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const createNewCopies = Symbol() as unknown as boolean;
|
||||
const options = setupOptions({ retries, createNewCopies });
|
||||
const filteredObjects = [createObject()];
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(), // not used by resolveImportErrors, but is a required return type
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
const getImportIdMapForRetriesParams = { objects: filteredObjects, retries, createNewCopies };
|
||||
expect(getImportIdMapForRetries).toHaveBeenCalledWith(getImportIdMapForRetriesParams);
|
||||
const getImportStateMapForRetriesParams = {
|
||||
objects: filteredObjects,
|
||||
retries,
|
||||
createNewCopies,
|
||||
};
|
||||
expect(mockGetImportStateMapForRetries).toHaveBeenCalledWith(
|
||||
getImportStateMapForRetriesParams
|
||||
);
|
||||
});
|
||||
|
||||
test('splits objects to overwrite from those not to overwrite', async () => {
|
||||
const retries = [createRetry()];
|
||||
const options = setupOptions({ retries });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(splitOverwrites).toHaveBeenCalledWith(collectedObjects, retries);
|
||||
expect(mockSplitOverwrites).toHaveBeenCalledWith(collectedObjects, retries);
|
||||
});
|
||||
|
||||
describe('with createNewCopies disabled', () => {
|
||||
test('does not regenerate object IDs', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(regenerateIds).not.toHaveBeenCalled();
|
||||
expect(mockRegenerateIds).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('creates saved objects', async () => {
|
||||
const options = setupOptions();
|
||||
const errors = [createError(), createError(), createError()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [errors[0]],
|
||||
collectedObjects: [], // doesn't matter
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
getMockFn(validateReferences).mockResolvedValue([errors[1]]);
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockValidateReferences.mockResolvedValue([errors[1]]);
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [errors[2]],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map([['foo', { id: 'someId' }]]),
|
||||
importStateMap: new Map([['foo', { destinationId: 'someId' }]]),
|
||||
pendingOverwrites: new Set(), // not used by resolveImportErrors, but is a required return type
|
||||
});
|
||||
getMockFn(getImportIdMapForRetries).mockReturnValue(
|
||||
mockGetImportStateMapForRetries.mockReturnValue(
|
||||
new Map([
|
||||
['foo', { id: 'newId' }],
|
||||
['bar', { id: 'anotherNewId' }],
|
||||
['foo', { destinationId: 'newId' }],
|
||||
['bar', { destinationId: 'anotherNewId' }],
|
||||
])
|
||||
);
|
||||
const importIdMap = new Map([
|
||||
['foo', { id: 'someId' }],
|
||||
['bar', { id: 'anotherNewId' }],
|
||||
const importStateMap = new Map([
|
||||
['foo', { destinationId: 'someId' }],
|
||||
['bar', { destinationId: 'anotherNewId' }],
|
||||
]);
|
||||
const objectsToOverwrite = [createObject()];
|
||||
const objectsToNotOverwrite = [createObject()];
|
||||
getMockFn(splitOverwrites).mockReturnValue({ objectsToOverwrite, objectsToNotOverwrite });
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
errors: [createError()], // this error will NOT be passed to the second `createSavedObjects` call
|
||||
mockSplitOverwrites.mockReturnValue({ objectsToOverwrite, objectsToNotOverwrite });
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [createError()], // this error will NOT be passed to the second `mockCreateSavedObjects` call
|
||||
createdObjects: [],
|
||||
});
|
||||
|
||||
|
@ -362,15 +392,15 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const partialCreateSavedObjectsParams = {
|
||||
accumulatedErrors: errors,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
namespace,
|
||||
};
|
||||
expect(createSavedObjects).toHaveBeenNthCalledWith(1, {
|
||||
expect(mockCreateSavedObjects).toHaveBeenNthCalledWith(1, {
|
||||
...partialCreateSavedObjectsParams,
|
||||
objects: objectsToOverwrite,
|
||||
overwrite: true,
|
||||
});
|
||||
expect(createSavedObjects).toHaveBeenNthCalledWith(2, {
|
||||
expect(mockCreateSavedObjects).toHaveBeenNthCalledWith(2, {
|
||||
...partialCreateSavedObjectsParams,
|
||||
objects: objectsToNotOverwrite,
|
||||
});
|
||||
|
@ -381,54 +411,65 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('regenerates object IDs', async () => {
|
||||
const options = setupOptions({ createNewCopies: true });
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
await resolveSavedObjectsImportErrors(options);
|
||||
expect(regenerateIds).toHaveBeenCalledWith(collectedObjects);
|
||||
expect(mockRegenerateIds).toHaveBeenCalledWith(collectedObjects);
|
||||
});
|
||||
|
||||
test('creates saved objects', async () => {
|
||||
const options = setupOptions({ createNewCopies: true });
|
||||
const errors = [createError(), createError(), createError()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [errors[0]],
|
||||
collectedObjects: [], // doesn't matter
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map([
|
||||
['foo', {}],
|
||||
['bar', {}],
|
||||
['baz', {}],
|
||||
['qux', { isOnlyReference: true }],
|
||||
]),
|
||||
});
|
||||
getMockFn(validateReferences).mockResolvedValue([errors[1]]);
|
||||
getMockFn(regenerateIds).mockReturnValue(
|
||||
mockCheckReferenceOrigins.mockResolvedValue({
|
||||
importStateMap: new Map([['qux', { isOnlyReference: true, destinationId: 'newId1' }]]),
|
||||
});
|
||||
mockValidateReferences.mockResolvedValue([errors[1]]);
|
||||
mockRegenerateIds.mockReturnValue(
|
||||
new Map([
|
||||
['foo', { id: 'randomId1' }],
|
||||
['bar', { id: 'randomId2' }],
|
||||
['baz', { id: 'randomId3' }],
|
||||
['foo', { destinationId: 'randomId1' }],
|
||||
['bar', { destinationId: 'randomId2' }],
|
||||
['baz', { destinationId: 'randomId3' }],
|
||||
])
|
||||
);
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [errors[2]],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map([['bar', { id: 'someId' }]]),
|
||||
importStateMap: new Map([['bar', { destinationId: 'someId' }]]),
|
||||
pendingOverwrites: new Set(), // not used by resolveImportErrors, but is a required return type
|
||||
});
|
||||
getMockFn(getImportIdMapForRetries).mockReturnValue(
|
||||
mockGetImportStateMapForRetries.mockReturnValue(
|
||||
new Map([
|
||||
['bar', { id: 'newId' }],
|
||||
['baz', { id: 'anotherNewId' }],
|
||||
['bar', { destinationId: 'newId2' }], // this is overridden by the checkConflicts result
|
||||
['baz', { destinationId: 'newId3' }],
|
||||
])
|
||||
);
|
||||
const importIdMap = new Map([
|
||||
['foo', { id: 'randomId1' }],
|
||||
['bar', { id: 'someId' }],
|
||||
['baz', { id: 'anotherNewId' }],
|
||||
|
||||
// assert that the importStateMap is correctly composed of the results from the five modules
|
||||
const importStateMap = new Map([
|
||||
['foo', { destinationId: 'randomId1' }],
|
||||
['bar', { destinationId: 'someId' }],
|
||||
['baz', { destinationId: 'newId3' }],
|
||||
['qux', { isOnlyReference: true, destinationId: 'newId1' }],
|
||||
]);
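// Editorial note: tracing each expected entry back to the mocked module that produced it:
//   'foo' comes from mockRegenerateIds ('randomId1'; nothing later overrides it)
//   'bar' comes from mockCheckConflicts ('someId'; overrides both mockRegenerateIds and
//         mockGetImportStateMapForRetries)
//   'baz' comes from mockGetImportStateMapForRetries ('newId3'; overrides mockRegenerateIds)
//   'qux' comes from mockCheckReferenceOrigins (reference-only entries are preserved rather
//         than regenerated)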
|
||||
const objectsToOverwrite = [createObject()];
|
||||
const objectsToNotOverwrite = [createObject()];
|
||||
getMockFn(splitOverwrites).mockReturnValue({ objectsToOverwrite, objectsToNotOverwrite });
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
errors: [createError()], // this error will NOT be passed to the second `createSavedObjects` call
|
||||
mockSplitOverwrites.mockReturnValue({ objectsToOverwrite, objectsToNotOverwrite });
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [createError()], // this error will NOT be passed to the second `mockCreateSavedObjects` call
|
||||
createdObjects: [],
|
||||
});
|
||||
|
||||
|
@ -436,15 +477,15 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const partialCreateSavedObjectsParams = {
|
||||
accumulatedErrors: errors,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
namespace,
|
||||
};
|
||||
expect(createSavedObjects).toHaveBeenNthCalledWith(1, {
|
||||
expect(mockCreateSavedObjects).toHaveBeenNthCalledWith(1, {
|
||||
...partialCreateSavedObjectsParams,
|
||||
objects: objectsToOverwrite,
|
||||
overwrite: true,
|
||||
});
|
||||
expect(createSavedObjects).toHaveBeenNthCalledWith(2, {
|
||||
expect(mockCreateSavedObjects).toHaveBeenNthCalledWith(2, {
|
||||
...partialCreateSavedObjectsParams,
|
||||
objects: objectsToNotOverwrite,
|
||||
});
|
||||
|
@ -462,10 +503,10 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
|
||||
test('returns success=false if an error occurred', async () => {
|
||||
const options = setupOptions();
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [createError()],
|
||||
collectedObjects: [],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
|
||||
const result = await resolveSavedObjectsImportErrors(options);
|
||||
|
@ -480,17 +521,17 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('executes import hooks', async () => {
|
||||
const options = setupOptions();
|
||||
const collectedObjects = [createObject()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [],
|
||||
collectedObjects,
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [],
|
||||
createdObjects: collectedObjects,
|
||||
});
|
||||
const warnings: SavedObjectsImportWarning[] = [{ type: 'simple', message: 'foo' }];
|
||||
getMockFn(executeImportHooks).mockResolvedValue(warnings);
|
||||
mockExecuteImportHooks.mockResolvedValue(warnings);
|
||||
|
||||
const result = await resolveSavedObjectsImportErrors(options);
|
||||
|
||||
|
@ -507,11 +548,11 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
const tmp = createObject();
|
||||
const obj2 = { ...tmp, destinationId: 'some-destinationId', originId: tmp.id };
|
||||
const obj3 = { ...createObject(), destinationId: 'another-destinationId' }; // empty originId; this is a new copy
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [error1],
|
||||
createdObjects: [obj1],
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [error2],
|
||||
createdObjects: [obj2, obj3],
|
||||
});
|
||||
|
@ -569,13 +610,13 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
},
|
||||
});
|
||||
|
||||
getMockFn(checkConflicts).mockResolvedValue({
|
||||
mockCheckConflicts.mockResolvedValue({
|
||||
errors: [],
|
||||
filteredObjects: [],
|
||||
importIdMap: new Map(),
|
||||
importStateMap: new Map(),
|
||||
pendingOverwrites: new Set(),
|
||||
});
|
||||
getMockFn(createSavedObjects)
|
||||
mockCreateSavedObjects
|
||||
.mockResolvedValueOnce({ errors: [], createdObjects: [obj1, obj2] })
|
||||
.mockResolvedValueOnce({ errors: [], createdObjects: [] });
|
||||
|
||||
|
@ -607,17 +648,17 @@ describe('#importSavedObjectsFromStream', () => {
|
|||
test('accumulates multiple errors', async () => {
|
||||
const options = setupOptions();
|
||||
const errors = [createError(), createError(), createError(), createError()];
|
||||
getMockFn(collectSavedObjects).mockResolvedValue({
|
||||
mockCollectSavedObjects.mockResolvedValue({
|
||||
errors: [errors[0]],
|
||||
collectedObjects: [],
|
||||
importIdMap: new Map(), // doesn't matter
|
||||
importStateMap: new Map(), // doesn't matter
|
||||
});
|
||||
getMockFn(validateReferences).mockResolvedValue([errors[1]]);
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
mockValidateReferences.mockResolvedValue([errors[1]]);
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [errors[2]],
|
||||
createdObjects: [],
|
||||
});
|
||||
getMockFn(createSavedObjects).mockResolvedValueOnce({
|
||||
mockCreateSavedObjects.mockResolvedValueOnce({
|
||||
errors: [errors[3]],
|
||||
createdObjects: [],
|
||||
});
|
||||
|
|
|
@ -20,10 +20,11 @@ import {
|
|||
createObjectsFilter,
|
||||
splitOverwrites,
|
||||
regenerateIds,
|
||||
checkReferenceOrigins,
|
||||
validateReferences,
|
||||
validateRetries,
|
||||
createSavedObjects,
|
||||
getImportIdMapForRetries,
|
||||
getImportStateMapForRetries,
|
||||
checkConflicts,
|
||||
executeImportHooks,
|
||||
} from './lib';
|
||||
|
@ -71,20 +72,20 @@ export async function resolveSavedObjectsImportErrors({
|
|||
|
||||
let successCount = 0;
|
||||
let errorAccumulator: SavedObjectsImportFailure[] = [];
|
||||
let importIdMap: Map<string, { id?: string; omitOriginId?: boolean }> = new Map();
|
||||
const supportedTypes = typeRegistry.getImportableAndExportableTypes().map((type) => type.name);
|
||||
const filter = createObjectsFilter(retries);
|
||||
|
||||
// Get the objects to resolve errors
|
||||
const { errors: collectorErrors, collectedObjects: objectsToResolve } = await collectSavedObjects(
|
||||
{
|
||||
readStream,
|
||||
objectLimit,
|
||||
filter,
|
||||
supportedTypes,
|
||||
}
|
||||
);
|
||||
errorAccumulator = [...errorAccumulator, ...collectorErrors];
|
||||
const collectSavedObjectsResult = await collectSavedObjects({
|
||||
readStream,
|
||||
objectLimit,
|
||||
filter,
|
||||
supportedTypes,
|
||||
});
|
||||
// Map of all IDs for objects that we are attempting to import, and any references that are not included in the read stream;
|
||||
// each value is empty by default
|
||||
let importStateMap = collectSavedObjectsResult.importStateMap;
|
||||
errorAccumulator = [...errorAccumulator, ...collectSavedObjectsResult.errors];
|
||||
|
||||
// Create a map of references to replace for each object to avoid iterating through
|
||||
// retries for every object to resolve
|
||||
|
@ -98,7 +99,7 @@ export async function resolveSavedObjectsImportErrors({
|
|||
}
|
||||
|
||||
// Replace references
|
||||
for (const savedObject of objectsToResolve) {
|
||||
for (const savedObject of collectSavedObjectsResult.collectedObjects) {
|
||||
const refMap = retriesReferencesMap.get(`${savedObject.type}:${savedObject.id}`);
|
||||
if (!refMap) {
|
||||
continue;
|
||||
|
@ -106,28 +107,42 @@ export async function resolveSavedObjectsImportErrors({
|
|||
for (const reference of savedObject.references || []) {
|
||||
if (refMap[`${reference.type}:${reference.id}`]) {
|
||||
reference.id = refMap[`${reference.type}:${reference.id}`];
|
||||
// Any reference ID changed here will supersede the results of checkReferenceOrigins below; this is intentional.
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check any references that aren't included in the import file and retries, to see if they have a match with a different origin
|
||||
const checkReferenceOriginsResult = await checkReferenceOrigins({
|
||||
savedObjectsClient,
|
||||
typeRegistry,
|
||||
namespace,
|
||||
importStateMap,
|
||||
});
|
||||
importStateMap = new Map([...importStateMap, ...checkReferenceOriginsResult.importStateMap]);
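// Editorial note: spreading two Maps into a new Map keeps the last entry seen for a duplicate
// key, so any `type:id` key returned by checkReferenceOrigins replaces the initial (empty)
// entry gathered from the read stream. A minimal standalone illustration with hypothetical
// values:
//
//   const merged = new Map([
//     ...new Map([['index-pattern:1', {}]]),
//     ...new Map([['index-pattern:1', { isOnlyReference: true, destinationId: 'abc' }]]),
//   ]);
//   merged.get('index-pattern:1'); // { isOnlyReference: true, destinationId: 'abc' }
//
// The same last-entry-wins rule drives the later merges with regenerateIds,
// getImportStateMapForRetries, and checkConflicts below.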
|
||||
|
||||
// Validate references
|
||||
const validateReferencesResult = await validateReferences(
|
||||
objectsToResolve,
|
||||
const validateReferencesResult = await validateReferences({
|
||||
objects: collectSavedObjectsResult.collectedObjects,
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
retries
|
||||
);
|
||||
importStateMap,
|
||||
retries,
|
||||
});
|
||||
errorAccumulator = [...errorAccumulator, ...validateReferencesResult];
|
||||
|
||||
if (createNewCopies) {
|
||||
// In case any missing reference errors were resolved, ensure that we regenerate those object IDs as well
|
||||
// This is because a retry to resolve a missing reference error may not necessarily specify a destinationId
|
||||
importIdMap = regenerateIds(objectsToResolve);
|
||||
importStateMap = new Map([
|
||||
...importStateMap, // preserve any entries for references that aren't included in collectedObjects
|
||||
...regenerateIds(collectSavedObjectsResult.collectedObjects),
|
||||
]);
|
||||
}
|
||||
|
||||
// Check single-namespace objects for conflicts in this namespace, and check multi-namespace objects for conflicts across all namespaces
|
||||
const checkConflictsParams = {
|
||||
objects: objectsToResolve,
|
||||
objects: collectSavedObjectsResult.collectedObjects,
|
||||
savedObjectsClient,
|
||||
namespace,
|
||||
retries,
|
||||
|
@ -137,16 +152,16 @@ export async function resolveSavedObjectsImportErrors({
|
|||
errorAccumulator = [...errorAccumulator, ...checkConflictsResult.errors];
|
||||
|
||||
// Check multi-namespace object types for regular conflicts and ambiguous conflicts
|
||||
const getImportIdMapForRetriesParams = {
|
||||
const getImportStateMapForRetriesParams = {
|
||||
objects: checkConflictsResult.filteredObjects,
|
||||
retries,
|
||||
createNewCopies,
|
||||
};
|
||||
const importIdMapForRetries = getImportIdMapForRetries(getImportIdMapForRetriesParams);
|
||||
importIdMap = new Map([
|
||||
...importIdMap,
|
||||
...importIdMapForRetries,
|
||||
...checkConflictsResult.importIdMap, // this importIdMap takes precedence over the others
|
||||
const importStateMapForRetries = getImportStateMapForRetries(getImportStateMapForRetriesParams);
|
||||
importStateMap = new Map([
|
||||
...importStateMap,
|
||||
...importStateMapForRetries,
|
||||
...checkConflictsResult.importStateMap, // this importStateMap takes precedence over the others
|
||||
]);
|
||||
|
||||
// Bulk create in two batches, overwrites and non-overwrites
|
||||
|
@ -161,7 +176,7 @@ export async function resolveSavedObjectsImportErrors({
|
|||
objects,
|
||||
accumulatedErrors,
|
||||
savedObjectsClient,
|
||||
importIdMap,
|
||||
importStateMap,
|
||||
namespace,
|
||||
overwrite,
|
||||
};
|
||||
|
@ -191,7 +206,10 @@ export async function resolveSavedObjectsImportErrors({
|
|||
}),
|
||||
];
|
||||
};
|
||||
const { objectsToOverwrite, objectsToNotOverwrite } = splitOverwrites(objectsToResolve, retries);
|
||||
const { objectsToOverwrite, objectsToNotOverwrite } = splitOverwrites(
|
||||
collectSavedObjectsResult.collectedObjects,
|
||||
retries
|
||||
);
|
||||
await bulkCreateObjects(objectsToOverwrite, true);
|
||||
await bulkCreateObjects(objectsToNotOverwrite);
|
||||
|
||||
|
|
|
@ -6,7 +6,8 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { mockUuidv4 } from '../../import/lib/__mocks__';
|
||||
jest.mock('uuid');
|
||||
|
||||
import supertest from 'supertest';
|
||||
import { UnwrapPromise } from '@kbn/utility-types';
|
||||
import { registerImportRoute } from '../import';
|
||||
|
@ -20,7 +21,6 @@ import { SavedObjectsErrorHelpers, SavedObjectsImporter } from '../..';
|
|||
|
||||
type SetupServerReturn = UnwrapPromise<ReturnType<typeof setupServer>>;
|
||||
|
||||
const { v4: uuidv4 } = jest.requireActual('uuid');
|
||||
const allowedTypes = ['index-pattern', 'visualization', 'dashboard'];
|
||||
const config = { maxImportPayloadBytes: 26214400, maxImportExportSize: 10000 } as SavedObjectConfig;
|
||||
let coreUsageStatsClient: jest.Mocked<CoreUsageStatsClient>;
|
||||
|
@ -47,8 +47,6 @@ describe(`POST ${URL}`, () => {
|
|||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
mockUuidv4.mockReset();
|
||||
mockUuidv4.mockImplementation(() => uuidv4());
|
||||
({ server, httpSetup, handlerContext } = await setupServer());
|
||||
handlerContext.savedObjects.typeRegistry.getImportableAndExportableTypes.mockReturnValue(
|
||||
allowedTypes.map(createExportableType)
|
||||
|
@ -488,7 +486,9 @@ describe(`POST ${URL}`, () => {
|
|||
|
||||
describe('createNewCopies enabled', () => {
|
||||
it('imports objects, regenerating all IDs/reference IDs present, and resetting all origin IDs', async () => {
|
||||
mockUuidv4
|
||||
const mockUuid = jest.requireMock('uuid');
|
||||
mockUuid.v4 = jest
|
||||
.fn()
|
||||
.mockReturnValueOnce('foo') // a uuid.v4() is generated for the request.id
|
||||
.mockReturnValueOnce('foo') // another uuid.v4() is used for the request.uuid
|
||||
.mockReturnValueOnce('new-id-1')
|
||||
|
|
|
@ -6,7 +6,8 @@
|
|||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import { mockUuidv4 } from '../../import/lib/__mocks__';
|
||||
jest.mock('uuid');
|
||||
|
||||
import supertest from 'supertest';
|
||||
import { UnwrapPromise } from '@kbn/utility-types';
|
||||
import { registerResolveImportErrorsRoute } from '../resolve_import_errors';
|
||||
|
@ -20,7 +21,6 @@ import { SavedObjectsImporter } from '../..';
|
|||
|
||||
type SetupServerReturn = UnwrapPromise<ReturnType<typeof setupServer>>;
|
||||
|
||||
const { v4: uuidv4 } = jest.requireActual('uuid');
|
||||
const allowedTypes = ['index-pattern', 'visualization', 'dashboard'];
|
||||
const config = { maxImportPayloadBytes: 26214400, maxImportExportSize: 10000 } as SavedObjectConfig;
|
||||
let coreUsageStatsClient: jest.Mocked<CoreUsageStatsClient>;
|
||||
|
@ -52,8 +52,6 @@ describe(`POST ${URL}`, () => {
|
|||
};
|
||||
|
||||
beforeEach(async () => {
|
||||
mockUuidv4.mockReset();
|
||||
mockUuidv4.mockImplementation(() => uuidv4());
|
||||
({ server, httpSetup, handlerContext } = await setupServer());
|
||||
handlerContext.savedObjects.typeRegistry.getImportableAndExportableTypes.mockReturnValue(
|
||||
allowedTypes.map(createExportableType)
|
||||
|
@ -336,7 +334,8 @@ describe(`POST ${URL}`, () => {
|
|||
|
||||
describe('createNewCopies enabled', () => {
|
||||
it('imports objects, regenerating all IDs/reference IDs present, and resetting all origin IDs', async () => {
|
||||
mockUuidv4.mockReturnValue('new-id-1');
|
||||
const mockUuid = jest.requireMock('uuid');
|
||||
mockUuid.v4 = jest.fn().mockReturnValue('new-id-1');
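// Editorial note: `jest.mock('uuid')` at the top of this file replaces the module with an
// automock, and `jest.requireMock('uuid')` retrieves that same mocked module instance here so
// the test can stub `v4` per test case; every regenerated ID in this test therefore becomes
// 'new-id-1'.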
|
||||
savedObjectsClient.bulkGet.mockResolvedValueOnce({ saved_objects: [mockIndexPattern] });
|
||||
const obj1 = {
|
||||
type: 'visualization',
|
||||
|
|
|
@@ -710,3 +710,60 @@
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "index-pattern:inbound-reference-origin-match-1-newId",
"index": ".kibana",
"source": {
"originId": "inbound-reference-origin-match-1",
"index-pattern": {
"title": "This is used to test if an imported object with a reference to this originId will be remapped properly"
},
"namespaces": ["*"],
"type": "index-pattern",
"migrationVersion": { "index-pattern": "8.0.0" },
"updated_at": "2017-09-21T18:49:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "index-pattern:inbound-reference-origin-match-2a",
"index": ".kibana",
"source": {
"originId": "inbound-reference-origin-match-2",
"index-pattern": {
"title": "This is used to test if an imported object with a reference to this originId will *not* be remapped"
},
"namespaces": ["*"],
"type": "index-pattern",
"migrationVersion": { "index-pattern": "8.0.0" },
"updated_at": "2017-09-21T18:49:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "index-pattern:inbound-reference-origin-match-2b",
"index": ".kibana",
"source": {
"originId": "inbound-reference-origin-match-2",
"index-pattern": {
"title": "This is used to test if an imported object with a reference to this originId will *not* be remapped"
},
"namespaces": ["*"],
"type": "index-pattern",
"migrationVersion": { "index-pattern": "8.0.0" },
"updated_at": "2017-09-21T18:49:16.270Z"
},
"type": "doc"
}
}

@@ -8,23 +8,34 @@
import expect from '@kbn/expect';
import { SuperTest } from 'supertest';
import type { Client } from '@elastic/elasticsearch';
import type { SavedObjectReference } from 'src/core/server';
import { SAVED_OBJECT_TEST_CASES as CASES } from '../lib/saved_object_test_cases';
import { SPACES } from '../lib/spaces';
import { expectResponses, getUrlPrefix, getTestTitle } from '../lib/saved_object_test_utils';
import { ExpectResponseBody, TestCase, TestDefinition, TestSuite } from '../lib/types';

export interface ImportTestDefinition extends TestDefinition {
request: Array<{ type: string; id: string; originId?: string }>;
request: Array<{
type: string;
id: string;
originId?: string;
references?: SavedObjectReference[];
}>;
overwrite: boolean;
createNewCopies: boolean;
}
export type ImportTestSuite = TestSuite<ImportTestDefinition>;
export interface ImportTestCase extends TestCase {
export type FailureType =
| 'unsupported_type'
| 'conflict'
| 'ambiguous_conflict'
| 'missing_references';
export interface ImportTestCase extends Omit<TestCase, 'failure'> {
originId?: string;
expectedNewId?: string;
references?: SavedObjectReference[];
successParam?: string;
failure?: 400 | 409; // only used for permitted response case
fail409Param?: string;
failureType?: FailureType; // only used for permitted response case
}

const NEW_ATTRIBUTE_KEY = 'title'; // all type mappings include this attribute, for simplicity's sake

@@ -37,33 +48,60 @@ const NEW_ATTRIBUTE_VAL = `New attribute value ${Date.now()}`;
// * id: conflict_3
// * id: conflict_4a, originId: conflict_4
// using the seven conflict test case objects below, we can exercise various permutations of exact/inexact/ambiguous conflict scenarios
const CID = 'conflict_';
const { HIDDEN, ...REMAINING_CASES } = CASES;
export const TEST_CASES: Record<string, ImportTestCase> = Object.freeze({
...CASES,
CONFLICT_1_OBJ: Object.freeze({ type: 'sharedtype', id: `${CID}1` }),
CONFLICT_1A_OBJ: Object.freeze({ type: 'sharedtype', id: `${CID}1a`, originId: `${CID}1` }),
CONFLICT_1B_OBJ: Object.freeze({ type: 'sharedtype', id: `${CID}1b`, originId: `${CID}1` }),
CONFLICT_2C_OBJ: Object.freeze({ type: 'sharedtype', id: `${CID}2c`, originId: `${CID}2` }),
CONFLICT_2D_OBJ: Object.freeze({ type: 'sharedtype', id: `${CID}2d`, originId: `${CID}2` }),
...REMAINING_CASES,
CONFLICT_1_OBJ: Object.freeze({ type: 'sharedtype', id: `conflict_1` }),
CONFLICT_1A_OBJ: Object.freeze({ type: 'sharedtype', id: `conflict_1a`, originId: `conflict_1` }),
CONFLICT_1B_OBJ: Object.freeze({ type: 'sharedtype', id: `conflict_1b`, originId: `conflict_1` }),
CONFLICT_2A_OBJ: Object.freeze({ type: 'sharedtype', id: `conflict_2a`, originId: `conflict_2` }),
CONFLICT_2C_OBJ: Object.freeze({ type: 'sharedtype', id: `conflict_2c`, originId: `conflict_2` }),
CONFLICT_2D_OBJ: Object.freeze({ type: 'sharedtype', id: `conflict_2d`, originId: `conflict_2` }),
CONFLICT_3A_OBJ: Object.freeze({
type: 'sharedtype',
id: `${CID}3a`,
originId: `${CID}3`,
expectedNewId: `${CID}3`,
id: `conflict_3a`,
originId: `conflict_3`,
expectedNewId: `conflict_3`,
}),
CONFLICT_4_OBJ: Object.freeze({
type: 'sharedtype',
id: `conflict_4`,
expectedNewId: `conflict_4a`,
}),
CONFLICT_4_OBJ: Object.freeze({ type: 'sharedtype', id: `${CID}4`, expectedNewId: `${CID}4a` }),
NEW_SINGLE_NAMESPACE_OBJ: Object.freeze({ type: 'isolatedtype', id: 'new-isolatedtype-id' }),
NEW_MULTI_NAMESPACE_OBJ: Object.freeze({ type: 'sharedtype', id: 'new-sharedtype-id' }),
NEW_NAMESPACE_AGNOSTIC_OBJ: Object.freeze({ type: 'globaltype', id: 'new-globaltype-id' }),
});
export const SPECIAL_TEST_CASES: Record<string, ImportTestCase> = Object.freeze({
HIDDEN,
OUTBOUND_REFERENCE_ORIGIN_MATCH_1_OBJ: Object.freeze({
// This object does not already exist, but it has a reference to the originId of an index pattern that does exist.
// We use index patterns because they are one of the few reference types that are validated, so the import will fail if the reference
// is broken.
// This import is designed to succeed because there is exactly one origin match for its reference, and that reference will be changed to
// match the index pattern's new ID.
type: 'sharedtype',
id: 'outbound-reference-origin-match-1',
references: [{ name: '1', type: 'index-pattern', id: 'inbound-reference-origin-match-1' }],
}),
OUTBOUND_REFERENCE_ORIGIN_MATCH_2_OBJ: Object.freeze({
// This object does not already exist, but it has a reference to the originId of two index patterns that do exist.
// This import is designed to fail because there are two origin matches for its reference, and we can't currently handle ambiguous
// destinations for reference origin matches.
type: 'sharedtype',
id: 'outbound-reference-origin-match-2',
references: [{ name: '1', type: 'index-pattern', id: 'inbound-reference-origin-match-2' }],
}),
});

/**
* Test cases have additional properties that we don't want to send in HTTP Requests
*/
const createRequest = ({ type, id, originId }: ImportTestCase) => ({
const createRequest = ({ type, id, originId, references }: ImportTestCase) => ({
type,
id,
...(originId && { originId }),
...(references && { references }),
});

const getConflictDest = (id: string) => ({
@@ -72,8 +110,20 @@ const getConflictDest = (id: string) => ({
updatedAt: '2017-09-21T18:59:16.270Z',
});

export const importTestCaseFailures = {
failUnsupportedType: (condition?: boolean): { failureType?: 'unsupported_type' } =>
condition !== false ? { failureType: 'unsupported_type' } : {},
failConflict: (condition?: boolean): { failureType?: 'conflict' } =>
condition !== false ? { failureType: 'conflict' } : {},
failAmbiguousConflict: (condition?: boolean): { failureType?: 'ambiguous_conflict' } =>
condition !== false ? { failureType: 'ambiguous_conflict' } : {},
failMissingReferences: (condition?: boolean): { failureType?: 'missing_references' } =>
condition !== false ? { failureType: 'missing_references' } : {},
};

export function importTestSuiteFactory(es: Client, esArchiver: any, supertest: SuperTest<any>) {
const expectSavedObjectForbidden = expectResponses.forbiddenTypes('bulk_create');
const expectSavedObjectForbidden = (action: string, typeOrTypes: string | string[]) =>
expectResponses.forbiddenTypes(action)(typeOrTypes);
const expectResponseBody =
(
testCases: ImportTestCase | ImportTestCase[],

@@ -87,12 +137,12 @@ export function importTestSuiteFactory(es: Client, esArchiver: any, supertest: S
const testCaseArray = Array.isArray(testCases) ? testCases : [testCases];
if (statusCode === 403) {
const types = testCaseArray.map((x) => x.type);
await expectSavedObjectForbidden(types)(response);
await expectSavedObjectForbidden('bulk_create', types)(response);
} else {
// permitted
const { success, successCount, successResults, errors } = response.body;
const expectedSuccesses = testCaseArray.filter((x) => !x.failure);
const expectedFailures = testCaseArray.filter((x) => x.failure);
const expectedSuccesses = testCaseArray.filter((x) => !x.failureType);
const expectedFailures = testCaseArray.filter((x) => x.failureType);
expect(success).to.eql(expectedFailures.length === 0);
expect(successCount).to.eql(expectedSuccesses.length);
if (expectedFailures.length) {

@@ -147,30 +197,37 @@ export function importTestSuiteFactory(es: Client, esArchiver: any, supertest: S
}
}
for (let i = 0; i < expectedFailures.length; i++) {
const { type, id, failure, fail409Param, expectedNewId } = expectedFailures[i];
const { type, id, failureType, expectedNewId } = expectedFailures[i];
// we don't know the order of the returned errors; search for each one
const object = (errors as Array<Record<string, unknown>>).find(
(x) => x.type === type && x.id === id
);
expect(object).not.to.be(undefined);
if (failure === 400) {
expect(object!.error).to.eql({ type: 'unsupported_type' });
} else {
// 409
let error: Record<string, unknown> = {
type: 'conflict',
...(expectedNewId && { destinationId: expectedNewId }),
};
if (fail409Param === 'ambiguous_conflict_2c') {
// "ambiguous destination" conflict
error = {
type: 'ambiguous_conflict',
// response destinations should be sorted by updatedAt in descending order, then ID in ascending order
destinations: [getConflictDest(`${CID}2a`), getConflictDest(`${CID}2b`)],
};
}
expect(object!.error).to.eql(error);
const expectedError: Record<string, unknown> = { type: failureType };
switch (failureType!) {
case 'unsupported_type':
break;
case 'conflict':
if (expectedNewId) {
expectedError.destinationId = expectedNewId;
}
break;
case 'ambiguous_conflict':
// We only have one test case for ambiguous conflicts, so these destination IDs are hardcoded below for simplicity.
// Response destinations should be sorted by updatedAt in descending order, then ID in ascending order.
expectedError.destinations = [
getConflictDest(`conflict_2a`),
getConflictDest(`conflict_2b`),
];
break;
case 'missing_references':
// We only have one test case for missing references, so this reference is hardcoded below for simplicity.
expectedError.references = [
{ type: 'index-pattern', id: 'inbound-reference-origin-match-2' },
];
break;
}
expect(object!.error).to.eql(expectedError);
}
}
};

@@ -8,6 +8,7 @@
import expect from '@kbn/expect';
import { SuperTest } from 'supertest';
import type { Client } from '@elastic/elasticsearch';
import type { SavedObjectReference, SavedObjectsImportRetry } from 'src/core/server';
import { SAVED_OBJECT_TEST_CASES as CASES } from '../lib/saved_object_test_cases';
import { SPACES } from '../lib/spaces';
import { expectResponses, getUrlPrefix, getTestTitle } from '../lib/saved_object_test_utils';

@@ -15,18 +16,32 @@ import { ExpectResponseBody, TestCase, TestDefinition, TestSuite } from '../lib/

export interface ResolveImportErrorsTestDefinition extends TestDefinition {
request: {
objects: Array<{ type: string; id: string; originId?: string }>;
retries: Array<{ type: string; id: string; overwrite: boolean; destinationId?: string }>;
objects: Array<{
type: string;
id: string;
originId?: string;
references?: SavedObjectReference[];
}>;
retries: Array<{
type: string;
id: string;
overwrite: boolean;
destinationId?: string;
replaceReferences?: SavedObjectsImportRetry['replaceReferences'];
}>;
};
overwrite: boolean;
createNewCopies: boolean;
}
export type ResolveImportErrorsTestSuite = TestSuite<ResolveImportErrorsTestDefinition>;
export interface ResolveImportErrorsTestCase extends TestCase {
export type FailureType = 'unsupported_type' | 'conflict';
export interface ResolveImportErrorsTestCase extends Omit<TestCase, 'failure'> {
originId?: string;
expectedNewId?: string;
references?: SavedObjectReference[];
replaceReferences?: SavedObjectsImportRetry['replaceReferences'];
successParam?: string;
failure?: 400 | 409; // only used for permitted response case
failureType?: FailureType; // only used for permitted response case
}

const NEW_ATTRIBUTE_KEY = 'title'; // all type mappings include this attribute, for simplicity's sake

@@ -39,8 +54,9 @@ const NEW_ATTRIBUTE_VAL = `New attribute value ${Date.now()}`;
// * id: conflict_3
// * id: conflict_4a, originId: conflict_4
// using the five conflict test case objects below, we can exercise various permutations of exact/inexact/ambiguous conflict scenarios
const { HIDDEN, ...REMAINING_CASES } = CASES;
export const TEST_CASES: Record<string, ResolveImportErrorsTestCase> = Object.freeze({
...CASES,
...REMAINING_CASES,
CONFLICT_1A_OBJ: Object.freeze({
type: 'sharedtype',
id: `conflict_1a`,

@@ -71,32 +87,78 @@ export const TEST_CASES: Record<string, ResolveImportErrorsTestCase> = Object.fr
expectedNewId: `conflict_4a`,
}),
});
export const SPECIAL_TEST_CASES: Record<string, ResolveImportErrorsTestCase> = Object.freeze({
HIDDEN,
OUTBOUND_REFERENCE_ORIGIN_MATCH_1_OBJ: Object.freeze({
// This object does not already exist, but it has a reference to the originId of an index pattern that does exist.
// We use index patterns because they are one of the few reference types that are validated, so the import will fail if the reference
// is broken.
// This import is designed to succeed because there is exactly one origin match for its reference, and that reference will be changed to
// match the index pattern's new ID.
type: 'sharedtype',
id: 'outbound-reference-origin-match-1',
references: [{ name: '1', type: 'index-pattern', id: 'inbound-reference-origin-match-1' }],
}),
OUTBOUND_REFERENCE_ORIGIN_MATCH_2_OBJ: Object.freeze({
// This object does not already exist, but it has a reference to the originId of two index patterns that do exist.
// This import would normally fail because there are two origin matches for its reference, and we can't currently handle ambiguous
// destinations for reference origin matches.
// However, when retrying we can specify which reference(s) should be replaced.
type: 'sharedtype',
id: 'outbound-reference-origin-match-2',
references: [{ name: '1', type: 'index-pattern', id: 'inbound-reference-origin-match-2' }],
replaceReferences: [
{
type: 'index-pattern',
from: 'inbound-reference-origin-match-2',
to: 'inbound-reference-origin-match-2a',
},
],
}),
});

/**
* Test cases have additional properties that we don't want to send in HTTP Requests
*/
const createRequest = (
{ type, id, originId, expectedNewId, successParam }: ResolveImportErrorsTestCase,
{
type,
id,
originId,
expectedNewId,
references,
replaceReferences,
successParam,
}: ResolveImportErrorsTestCase,
overwrite: boolean
): ResolveImportErrorsTestDefinition['request'] => ({
objects: [{ type, id, ...(originId && { originId }) }],
objects: [{ type, id, ...(originId && { originId }), ...(references && { references }) }],
retries: [
{
type,
id,
overwrite,
...(expectedNewId && { destinationId: expectedNewId }),
...(replaceReferences && { replaceReferences }),
...(successParam === 'createNewCopy' && { createNewCopy: true }),
},
],
});

export const resolveImportErrorsTestCaseFailures = {
failUnsupportedType: (condition?: boolean): { failureType?: 'unsupported_type' } =>
condition !== false ? { failureType: 'unsupported_type' } : {},
failConflict: (condition?: boolean): { failureType?: 'conflict' } =>
condition !== false ? { failureType: 'conflict' } : {},
};

export function resolveImportErrorsTestSuiteFactory(
es: Client,
esArchiver: any,
supertest: SuperTest<any>
) {
const expectSavedObjectForbidden = expectResponses.forbiddenTypes('bulk_create');
const expectSavedObjectForbidden = (action: string, typeOrTypes: string | string[]) =>
expectResponses.forbiddenTypes(action)(typeOrTypes);
const expectResponseBody =
(
testCases: ResolveImportErrorsTestCase | ResolveImportErrorsTestCase[],

@@ -110,12 +172,12 @@ export function resolveImportErrorsTestSuiteFactory(
const testCaseArray = Array.isArray(testCases) ? testCases : [testCases];
if (statusCode === 403) {
const types = testCaseArray.map((x) => x.type);
await expectSavedObjectForbidden(types)(response);
await expectSavedObjectForbidden('bulk_create', types)(response);
} else {
// permitted
const { success, successCount, successResults, errors } = response.body;
const expectedSuccesses = testCaseArray.filter((x) => !x.failure);
const expectedFailures = testCaseArray.filter((x) => x.failure);
const expectedSuccesses = testCaseArray.filter((x) => !x.failureType);
const expectedFailures = testCaseArray.filter((x) => x.failureType);
expect(success).to.eql(expectedFailures.length === 0);
expect(successCount).to.eql(expectedSuccesses.length);
if (expectedFailures.length) {

@@ -168,21 +230,23 @@ export function resolveImportErrorsTestSuiteFactory(
}
}
for (let i = 0; i < expectedFailures.length; i++) {
const { type, id, failure, expectedNewId } = expectedFailures[i];
const { type, id, failureType, expectedNewId } = expectedFailures[i];
// we don't know the order of the returned errors; search for each one
const object = (errors as Array<Record<string, unknown>>).find(
(x) => x.type === type && x.id === id
);
expect(object).not.to.be(undefined);
if (failure === 400) {
expect(object!.error).to.eql({ type: 'unsupported_type' });
} else {
// 409
expect(object!.error).to.eql({
type: 'conflict',
...(expectedNewId && { destinationId: expectedNewId }),
});
const expectedError: Record<string, unknown> = { type: failureType };
switch (failureType!) {
case 'unsupported_type':
break;
case 'conflict':
if (expectedNewId) {
expectedError.destinationId = expectedNewId;
}
break;
}
expect(object!.error).to.eql(expectedError);
}
}
};

@ -6,12 +6,14 @@
|
|||
*/
|
||||
|
||||
import { SPACES } from '../../common/lib/spaces';
|
||||
import { testCaseFailures, getTestScenarios } from '../../common/lib/saved_object_test_utils';
|
||||
import { getTestScenarios } from '../../common/lib/saved_object_test_utils';
|
||||
import { TestUser } from '../../common/lib/types';
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
import {
|
||||
importTestSuiteFactory,
|
||||
importTestCaseFailures,
|
||||
TEST_CASES as CASES,
|
||||
SPECIAL_TEST_CASES,
|
||||
ImportTestDefinition,
|
||||
} from '../../common/suites/import';
|
||||
|
||||
|
@ -20,21 +22,23 @@ const {
|
|||
SPACE_1: { spaceId: SPACE_1_ID },
|
||||
SPACE_2: { spaceId: SPACE_2_ID },
|
||||
} = SPACES;
|
||||
const { fail400, fail409 } = testCaseFailures;
|
||||
const { failUnsupportedType, failConflict, failAmbiguousConflict, failMissingReferences } =
|
||||
importTestCaseFailures;
|
||||
const destinationId = (condition?: boolean) =>
|
||||
condition !== false ? { successParam: 'destinationId' } : {};
|
||||
const newCopy = () => ({ successParam: 'createNewCopy' });
|
||||
const ambiguousConflict = (suffix: string) => ({
|
||||
failure: 409 as 409,
|
||||
fail409Param: `ambiguous_conflict_${suffix}`,
|
||||
});
|
||||
|
||||
const createNewCopiesTestCases = () => {
|
||||
// for each outcome, if failure !== undefined then we expect to receive
|
||||
// an error; otherwise, we expect to receive a success result
|
||||
const cases = Object.entries(CASES).filter(([key]) => key !== 'HIDDEN');
|
||||
const importable = cases.map(([, val]) => ({ ...val, successParam: 'createNewCopies' }));
|
||||
const nonImportable = [{ ...CASES.HIDDEN, ...fail400() }];
|
||||
const importable = Object.entries(CASES).map(([, val]) => ({
|
||||
...val,
|
||||
successParam: 'createNewCopies',
|
||||
}));
|
||||
const nonImportable = [{ ...CASES.HIDDEN, ...failUnsupportedType() }]; // unsupported_type is an "unresolvable" error
|
||||
// Other special test cases are excluded because they can result in "resolvable" errors that will prevent the rest of the objects from
|
||||
// being created. The test suite assumes that when the createNewCopies option is enabled, all non-error results are actually created,
|
||||
// and it makes assertions based on that.
|
||||
const all = [...importable, ...nonImportable];
|
||||
return { importable, nonImportable, all };
|
||||
};
|
||||
|
@ -46,64 +50,92 @@ const createTestCases = (overwrite: boolean, spaceId: string) => {
|
|||
// when overwrite=true, all of the objects in this group are created successfully, so we can check the created object attributes
|
||||
{
|
||||
...CASES.SINGLE_NAMESPACE_DEFAULT_SPACE,
|
||||
...fail409(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...failConflict(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
},
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_1, ...fail409(!overwrite && spaceId === SPACE_1_ID) },
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_2, ...fail409(!overwrite && spaceId === SPACE_2_ID) },
|
||||
{ ...CASES.NAMESPACE_AGNOSTIC, ...fail409(!overwrite) },
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_1, ...failConflict(!overwrite && spaceId === SPACE_1_ID) },
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_2, ...failConflict(!overwrite && spaceId === SPACE_2_ID) },
|
||||
{ ...CASES.NAMESPACE_AGNOSTIC, ...failConflict(!overwrite) },
|
||||
CASES.NEW_SINGLE_NAMESPACE_OBJ,
|
||||
CASES.NEW_NAMESPACE_AGNOSTIC_OBJ,
|
||||
];
|
||||
const group1NonImportable = [{ ...CASES.HIDDEN, ...fail400() }];
|
||||
const group1NonImportable = [{ ...CASES.HIDDEN, ...failUnsupportedType() }];
|
||||
const group1All = group1Importable.concat(group1NonImportable);
|
||||
const group2 = [
|
||||
// when overwrite=true, all of the objects in this group are created successfully, so we can check the created object attributes
|
||||
CASES.NEW_MULTI_NAMESPACE_OBJ,
|
||||
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...fail409(!overwrite) },
|
||||
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...failConflict(!overwrite) },
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_DEFAULT_AND_SPACE_1,
|
||||
...fail409(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
|
||||
...failConflict(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
|
||||
...destinationId(spaceId !== DEFAULT_SPACE_ID && spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ONLY_SPACE_1,
|
||||
...fail409(!overwrite && spaceId === SPACE_1_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_1_ID),
|
||||
...destinationId(spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ONLY_SPACE_2,
|
||||
...fail409(!overwrite && spaceId === SPACE_2_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_2_ID),
|
||||
...destinationId(spaceId !== SPACE_2_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_DEFAULT_SPACE,
|
||||
...fail409(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...failConflict(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...destinationId(spaceId !== DEFAULT_SPACE_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_SPACE_1,
|
||||
...fail409(!overwrite && spaceId === SPACE_1_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_1_ID),
|
||||
...destinationId(spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{ ...CASES.CONFLICT_1A_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
|
||||
{ ...CASES.CONFLICT_1B_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
|
||||
{ ...CASES.CONFLICT_3A_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
{ ...CASES.CONFLICT_4_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
{ ...CASES.CONFLICT_3A_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
{ ...CASES.CONFLICT_4_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
];
|
||||
const group3 = [
|
||||
// when overwrite=true, all of the objects in this group are errors, so we cannot check the created object attributes
|
||||
// grouping errors together simplifies the test suite code
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...ambiguousConflict('2c') }, // "ambiguous destination" conflict
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...failAmbiguousConflict() }, // "ambiguous destination" conflict
|
||||
];
|
||||
const group4 = [
|
||||
// This group needs to be executed *after* the previous test case, because those error assertions include metadata of the destinations,
|
||||
// and *these* test cases would change that metadata.
|
||||
{ ...CASES.CONFLICT_2A_OBJ, ...failConflict(!overwrite) }, // "exact match" conflict with 2a
|
||||
{
|
||||
// "inexact match" conflict with 2b (since 2a already has a conflict source, this is not an ambiguous destination conflict)
|
||||
...CASES.CONFLICT_2C_OBJ,
|
||||
...failConflict(!overwrite),
|
||||
...destinationId(),
|
||||
expectedNewId: 'conflict_2b',
|
||||
},
|
||||
];
|
||||
const group5 = [
|
||||
// when overwrite=true, all of the objects in this group are created successfully, so we can check the created object attributes
|
||||
{ ...CASES.CONFLICT_1_OBJ, ...fail409(!overwrite) }, // "exact match" conflict
|
||||
{ ...CASES.CONFLICT_1_OBJ, ...failConflict(!overwrite) }, // "exact match" conflict
|
||||
CASES.CONFLICT_1A_OBJ, // no conflict because CONFLICT_1_OBJ is an exact match
|
||||
CASES.CONFLICT_1B_OBJ, // no conflict because CONFLICT_1_OBJ is an exact match
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...newCopy() }, // "ambiguous source and destination" conflict which results in a new destination ID and empty origin ID
|
||||
{ ...CASES.CONFLICT_2D_OBJ, ...newCopy() }, // "ambiguous source and destination" conflict which results in a new destination ID and empty origin ID
|
||||
];
|
||||
return { group1Importable, group1NonImportable, group1All, group2, group3, group4 };
|
||||
const refOrigins = [
|
||||
// One of these cases will always generate a missing_references error, which is an "unresolvable" error that stops any other objects
|
||||
// from being created in the import. Other test cases can have assertions based on the created objects' attributes when the overwrite
|
||||
// option is enabled, but these test cases are simply asserting pass/fail, so this group needs to be tested separately.
|
||||
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_1_OBJ },
|
||||
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_2_OBJ, ...failMissingReferences() },
|
||||
];
|
||||
return {
|
||||
group1Importable,
|
||||
group1NonImportable,
|
||||
group1All,
|
||||
group2,
|
||||
group3,
|
||||
group4,
|
||||
group5,
|
||||
refOrigins,
|
||||
};
|
||||
};
|
||||
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
|
@ -121,48 +153,89 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
|
||||
if (createNewCopies) {
|
||||
const { importable, nonImportable, all } = createNewCopiesTestCases();
|
||||
const unauthorizedCommonTestDefinitions = [
|
||||
createTestDefinitions(importable, true, { createNewCopies, spaceId }),
|
||||
createTestDefinitions(nonImportable, false, { createNewCopies, spaceId, singleRequest }),
|
||||
createTestDefinitions(all, true, {
|
||||
createNewCopies,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden('bulk_create', [
|
||||
'globaltype',
|
||||
'isolatedtype',
|
||||
'sharedtype',
|
||||
'sharecapabletype',
|
||||
]),
|
||||
}),
|
||||
];
|
||||
return {
|
||||
unauthorized: [
|
||||
createTestDefinitions(importable, true, { createNewCopies, spaceId }),
|
||||
createTestDefinitions(nonImportable, false, { createNewCopies, spaceId, singleRequest }),
|
||||
createTestDefinitions(all, true, {
|
||||
createNewCopies,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden([
|
||||
'globaltype',
|
||||
'isolatedtype',
|
||||
'sharedtype',
|
||||
'sharecapabletype',
|
||||
]),
|
||||
}),
|
||||
].flat(),
|
||||
unauthorizedRead: unauthorizedCommonTestDefinitions.flat(),
|
||||
unauthorizedWrite: unauthorizedCommonTestDefinitions.flat(),
|
||||
authorized: createTestDefinitions(all, false, { createNewCopies, spaceId, singleRequest }),
|
||||
};
|
||||
}
|
||||
|
||||
const { group1Importable, group1NonImportable, group1All, group2, group3, group4 } =
|
||||
createTestCases(overwrite, spaceId);
|
||||
return {
|
||||
unauthorized: [
|
||||
createTestDefinitions(group1Importable, true, { overwrite, spaceId }),
|
||||
createTestDefinitions(group1NonImportable, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group1All, true, {
|
||||
const {
|
||||
group1Importable,
|
||||
group1NonImportable,
|
||||
group1All,
|
||||
group2,
|
||||
group3,
|
||||
group4,
|
||||
group5,
|
||||
refOrigins,
|
||||
} = createTestCases(overwrite, spaceId);
|
||||
const unauthorizedCommonTestDefinitions = [
|
||||
createTestDefinitions(group1Importable, true, { overwrite, spaceId }),
|
||||
createTestDefinitions(group1NonImportable, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group1All, true, {
|
||||
overwrite,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden('bulk_create', [
|
||||
'globaltype',
|
||||
'isolatedtype',
|
||||
]),
|
||||
}),
|
||||
createTestDefinitions(group2, true, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group3, true, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group4, true, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group5, true, { overwrite, spaceId, singleRequest }),
|
||||
];
|
||||
const unauthorizedReadTestDefinitions = [...unauthorizedCommonTestDefinitions];
|
||||
const unauthorizedWriteTestDefinitions = [...unauthorizedCommonTestDefinitions];
|
||||
const authorizedTestDefinitions = [
|
||||
createTestDefinitions(group1All, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group2, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group3, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group4, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group5, false, { overwrite, spaceId, singleRequest }),
|
||||
];
|
||||
if (!overwrite) {
|
||||
// Only include this group of test cases if the overwrite option is not enabled
|
||||
unauthorizedReadTestDefinitions.push(
|
||||
createTestDefinitions(refOrigins, true, {
|
||||
overwrite,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden(['globaltype', 'isolatedtype']),
|
||||
}),
|
||||
createTestDefinitions(group2, true, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group3, true, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group4, true, { overwrite, spaceId, singleRequest }),
|
||||
].flat(),
|
||||
authorized: [
|
||||
createTestDefinitions(group1All, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group2, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group3, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group4, false, { overwrite, spaceId, singleRequest }),
|
||||
].flat(),
|
||||
responseBodyOverride: expectSavedObjectForbidden('bulk_get', ['index-pattern']),
|
||||
})
|
||||
);
|
||||
unauthorizedWriteTestDefinitions.push(
|
||||
createTestDefinitions(refOrigins, true, {
|
||||
overwrite,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
})
|
||||
);
|
||||
authorizedTestDefinitions.push(
|
||||
createTestDefinitions(refOrigins, false, { overwrite, spaceId, singleRequest })
|
||||
);
|
||||
}
|
||||
return {
|
||||
unauthorizedRead: unauthorizedReadTestDefinitions.flat(),
|
||||
unauthorizedWrite: unauthorizedWriteTestDefinitions.flat(),
|
||||
authorized: authorizedTestDefinitions.flat(),
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -180,20 +253,20 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
? ' with createNewCopies enabled'
|
||||
: ''
|
||||
}`;
|
||||
const { unauthorized, authorized } = createTests(overwrite, createNewCopies, spaceId);
|
||||
const { unauthorizedRead, unauthorizedWrite, authorized } = createTests(
|
||||
overwrite,
|
||||
createNewCopies,
|
||||
spaceId
|
||||
);
|
||||
const _addTests = (user: TestUser, tests: ImportTestDefinition[]) => {
|
||||
addTests(`${user.description}${suffix}`, { user, spaceId, tests });
|
||||
};
|
||||
|
||||
[
|
||||
users.noAccess,
|
||||
users.legacyAll,
|
||||
users.dualRead,
|
||||
users.readGlobally,
|
||||
users.readAtSpace,
|
||||
users.allAtOtherSpace,
|
||||
].forEach((user) => {
|
||||
_addTests(user, unauthorized);
|
||||
[users.noAccess, users.legacyAll, users.allAtOtherSpace].forEach((user) => {
|
||||
_addTests(user, unauthorizedRead);
|
||||
});
|
||||
[users.dualRead, users.readGlobally, users.readAtSpace].forEach((user) => {
|
||||
_addTests(user, unauthorizedWrite);
|
||||
});
|
||||
[users.dualAll, users.allGlobally, users.allAtSpace, users.superuser].forEach((user) => {
|
||||
_addTests(user, authorized);
|
||||
|
|
|
@ -7,12 +7,14 @@
|
|||
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { SPACES } from '../../common/lib/spaces';
|
||||
import { testCaseFailures, getTestScenarios } from '../../common/lib/saved_object_test_utils';
|
||||
import { getTestScenarios } from '../../common/lib/saved_object_test_utils';
|
||||
import { TestUser } from '../../common/lib/types';
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
import {
|
||||
resolveImportErrorsTestSuiteFactory,
|
||||
resolveImportErrorsTestCaseFailures,
|
||||
TEST_CASES as CASES,
|
||||
SPECIAL_TEST_CASES,
|
||||
ResolveImportErrorsTestDefinition,
|
||||
} from '../../common/suites/resolve_import_errors';
|
||||
|
||||
|
@ -21,7 +23,7 @@ const {
|
|||
SPACE_1: { spaceId: SPACE_1_ID },
|
||||
SPACE_2: { spaceId: SPACE_2_ID },
|
||||
} = SPACES;
|
||||
const { fail400, fail409 } = testCaseFailures;
|
||||
const { failUnsupportedType, failConflict } = resolveImportErrorsTestCaseFailures;
|
||||
const destinationId = (condition?: boolean) =>
|
||||
condition !== false ? { successParam: 'destinationId' } : {};
|
||||
const newCopy = () => ({ successParam: 'createNewCopy' });
|
||||
|
@ -29,13 +31,12 @@ const newCopy = () => ({ successParam: 'createNewCopy' });
|
|||
const createNewCopiesTestCases = () => {
|
||||
// for each outcome, if failure !== undefined then we expect to receive
|
||||
// an error; otherwise, we expect to receive a success result
|
||||
const cases = Object.entries(CASES).filter(([key]) => key !== 'HIDDEN');
|
||||
const importable = cases.map(([, val]) => ({
|
||||
const importable = Object.entries(CASES).map(([, val]) => ({
|
||||
...val,
|
||||
successParam: 'createNewCopies',
|
||||
expectedNewId: uuidv4(),
|
||||
}));
|
||||
const nonImportable = [{ ...CASES.HIDDEN, ...fail400() }];
|
||||
const nonImportable = [{ ...SPECIAL_TEST_CASES.HIDDEN, ...failUnsupportedType() }]; // unsupported_type is an "unresolvable" error
|
||||
const all = [...importable, ...nonImportable];
|
||||
return { importable, nonImportable, all };
|
||||
};
|
||||
|
@ -50,36 +51,36 @@ const createTestCases = (overwrite: boolean, spaceId: string) => {
|
|||
? CASES.SINGLE_NAMESPACE_SPACE_1
|
||||
: CASES.SINGLE_NAMESPACE_SPACE_2;
|
||||
const group1Importable = [
|
||||
{ ...singleNamespaceObject, ...fail409(!overwrite) },
|
||||
{ ...CASES.NAMESPACE_AGNOSTIC, ...fail409(!overwrite) },
|
||||
{ ...singleNamespaceObject, ...failConflict(!overwrite) },
|
||||
{ ...CASES.NAMESPACE_AGNOSTIC, ...failConflict(!overwrite) },
|
||||
];
|
||||
const group1NonImportable = [{ ...CASES.HIDDEN, ...fail400() }];
|
||||
const group1NonImportable = [{ ...SPECIAL_TEST_CASES.HIDDEN, ...failUnsupportedType() }];
|
||||
const group1All = [...group1Importable, ...group1NonImportable];
|
||||
const group2 = [
|
||||
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...fail409(!overwrite) },
|
||||
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...failConflict(!overwrite) },
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_DEFAULT_AND_SPACE_1,
|
||||
...fail409(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
|
||||
...failConflict(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
|
||||
...destinationId(spaceId !== DEFAULT_SPACE_ID && spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ONLY_SPACE_1,
|
||||
...fail409(!overwrite && spaceId === SPACE_1_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_1_ID),
|
||||
...destinationId(spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ONLY_SPACE_2,
|
||||
...fail409(!overwrite && spaceId === SPACE_2_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_2_ID),
|
||||
...destinationId(spaceId !== SPACE_2_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_DEFAULT_SPACE,
|
||||
...fail409(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...failConflict(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...destinationId(spaceId !== DEFAULT_SPACE_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_SPACE_1,
|
||||
...fail409(!overwrite && spaceId === SPACE_1_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_1_ID),
|
||||
...destinationId(spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{ ...CASES.CONFLICT_1A_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
|
||||
|
@ -87,11 +88,16 @@ const createTestCases = (overwrite: boolean, spaceId: string) => {
|
|||
// all of the cases below represent imports that had an inexact match conflict or an ambiguous conflict
|
||||
// if we call _resolve_import_errors and don't specify overwrite, each of these will result in a conflict because an object with that
|
||||
// `expectedDestinationId` already exists
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...fail409(!overwrite), ...destinationId() }, // "ambiguous destination" conflict; if overwrite=true, will overwrite 'conflict_2a'
|
||||
{ ...CASES.CONFLICT_3A_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_3'
|
||||
{ ...CASES.CONFLICT_4_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_4a'
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "ambiguous destination" conflict; if overwrite=true, will overwrite 'conflict_2a'
|
||||
{ ...CASES.CONFLICT_3A_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_3'
|
||||
{ ...CASES.CONFLICT_4_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_4a'
|
||||
];
|
||||
return { group1Importable, group1NonImportable, group1All, group2 };
|
||||
const refOrigins = [
|
||||
// These are in a separate group because they will result in a different 403 error for users who are unauthorized to read
|
||||
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_1_OBJ },
|
||||
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_2_OBJ },
|
||||
];
|
||||
return { group1Importable, group1NonImportable, group1All, group2, refOrigins };
|
||||
};
|
||||
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
|
@ -107,45 +113,62 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
|
||||
if (createNewCopies) {
|
||||
const { importable, nonImportable, all } = createNewCopiesTestCases();
|
||||
const unauthorizedCommonTestDefinitions = [
|
||||
createTestDefinitions(importable, true, { createNewCopies, spaceId }),
|
||||
createTestDefinitions(nonImportable, false, { createNewCopies, spaceId, singleRequest }),
|
||||
createTestDefinitions(all, true, {
|
||||
createNewCopies,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden('bulk_create', [
|
||||
'globaltype',
|
||||
'isolatedtype',
|
||||
'sharedtype',
|
||||
'sharecapabletype',
|
||||
]),
|
||||
}),
|
||||
];
|
||||
return {
|
||||
unauthorized: [
|
||||
createTestDefinitions(importable, true, { createNewCopies, spaceId }),
|
||||
createTestDefinitions(nonImportable, false, { createNewCopies, spaceId, singleRequest }),
|
||||
createTestDefinitions(all, true, {
|
||||
createNewCopies,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden([
|
||||
'globaltype',
|
||||
'isolatedtype',
|
||||
'sharedtype',
|
||||
'sharecapabletype',
|
||||
]),
|
||||
}),
|
||||
].flat(),
|
||||
unauthorizedRead: unauthorizedCommonTestDefinitions.flat(),
|
||||
unauthorizedWrite: unauthorizedCommonTestDefinitions.flat(),
|
||||
authorized: createTestDefinitions(all, false, { createNewCopies, spaceId, singleRequest }),
|
||||
};
|
||||
}
|
||||
|
||||
const { group1Importable, group1NonImportable, group1All, group2 } = createTestCases(
|
||||
overwrite,
|
||||
spaceId
|
||||
);
|
||||
const { group1Importable, group1NonImportable, group1All, group2, refOrigins } =
|
||||
createTestCases(overwrite, spaceId);
|
||||
const unauthorizedCommonTestDefinitions = [
|
||||
createTestDefinitions(group1Importable, true, { overwrite, spaceId }),
|
||||
createTestDefinitions(group1NonImportable, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group1All, true, {
|
||||
overwrite,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden('bulk_create', [
|
||||
'globaltype',
|
||||
'isolatedtype',
|
||||
]),
|
||||
}),
|
||||
createTestDefinitions(group2, true, { overwrite, spaceId, singleRequest }),
|
||||
];
|
||||
return {
|
||||
unauthorized: [
|
||||
createTestDefinitions(group1Importable, true, { overwrite, spaceId }),
|
||||
createTestDefinitions(group1NonImportable, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group1All, true, {
|
||||
unauthorizedRead: [
|
||||
...unauthorizedCommonTestDefinitions,
|
||||
createTestDefinitions(refOrigins, true, {
|
||||
overwrite,
|
||||
spaceId,
|
||||
singleRequest,
|
||||
responseBodyOverride: expectSavedObjectForbidden(['globaltype', 'isolatedtype']),
|
||||
responseBodyOverride: expectSavedObjectForbidden('bulk_get', ['index-pattern']),
|
||||
}),
|
||||
createTestDefinitions(group2, true, { overwrite, spaceId, singleRequest }),
|
||||
].flat(),
|
||||
unauthorizedWrite: [
|
||||
...unauthorizedCommonTestDefinitions,
|
||||
createTestDefinitions(refOrigins, true, { overwrite, spaceId, singleRequest }),
|
||||
].flat(),
|
||||
authorized: [
|
||||
createTestDefinitions(group1All, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(group2, false, { overwrite, spaceId, singleRequest }),
|
||||
createTestDefinitions(refOrigins, false, { overwrite, spaceId, singleRequest }),
|
||||
].flat(),
|
||||
};
|
||||
};
|
||||
|
@ -164,20 +187,20 @@ export default function ({ getService }: FtrProviderContext) {
|
|||
? ' with createNewCopies enabled'
|
||||
: ''
|
||||
}`;
|
||||
const { unauthorized, authorized } = createTests(overwrite, createNewCopies, spaceId);
|
||||
const { unauthorizedRead, unauthorizedWrite, authorized } = createTests(
|
||||
overwrite,
|
||||
createNewCopies,
|
||||
spaceId
|
||||
);
|
||||
const _addTests = (user: TestUser, tests: ResolveImportErrorsTestDefinition[]) => {
|
||||
addTests(`${user.description}${suffix}`, { user, spaceId, tests });
|
||||
};
|
||||
|
||||
[
|
||||
users.noAccess,
|
||||
users.legacyAll,
|
||||
users.dualRead,
|
||||
users.readGlobally,
|
||||
users.readAtSpace,
|
||||
users.allAtOtherSpace,
|
||||
].forEach((user) => {
|
||||
_addTests(user, unauthorized);
|
||||
[users.noAccess, users.legacyAll, users.allAtOtherSpace].forEach((user) => {
|
||||
_addTests(user, unauthorizedRead);
|
||||
});
|
||||
[users.dualRead, users.readGlobally, users.readAtSpace].forEach((user) => {
|
||||
_addTests(user, unauthorizedWrite);
|
||||
});
|
||||
[users.dualAll, users.allGlobally, users.allAtSpace, users.superuser].forEach((user) => {
|
||||
_addTests(user, authorized);
|
||||
|
|
|
@ -6,77 +6,81 @@
|
|||
*/
|
||||
|
||||
import { SPACES } from '../../common/lib/spaces';
|
||||
import { testCaseFailures, getTestScenarios } from '../../common/lib/saved_object_test_utils';
|
||||
import { getTestScenarios } from '../../common/lib/saved_object_test_utils';
|
||||
import { FtrProviderContext } from '../../common/ftr_provider_context';
|
||||
import { importTestSuiteFactory, TEST_CASES as CASES } from '../../common/suites/import';
|
||||
import {
|
||||
importTestSuiteFactory,
|
||||
importTestCaseFailures,
|
||||
TEST_CASES as CASES,
|
||||
SPECIAL_TEST_CASES,
|
||||
} from '../../common/suites/import';
|
||||
|
||||
const {
|
||||
DEFAULT: { spaceId: DEFAULT_SPACE_ID },
|
||||
SPACE_1: { spaceId: SPACE_1_ID },
|
||||
SPACE_2: { spaceId: SPACE_2_ID },
|
||||
} = SPACES;
|
||||
const { fail400, fail409 } = testCaseFailures;
|
||||
const { failUnsupportedType, failConflict, failAmbiguousConflict, failMissingReferences } =
|
||||
importTestCaseFailures;
|
||||
const destinationId = (condition?: boolean) =>
|
||||
condition !== false ? { successParam: 'destinationId' } : {};
|
||||
const newCopy = () => ({ successParam: 'createNewCopy' });
|
||||
const ambiguousConflict = (suffix: string) => ({
|
||||
failure: 409 as 409,
|
||||
fail409Param: `ambiguous_conflict_${suffix}`,
|
||||
});
|
||||
|
||||
const createNewCopiesTestCases = () => {
|
||||
// for each outcome, if failure !== undefined then we expect to receive
|
||||
// for each outcome, if failureType !== undefined then we expect to receive
|
||||
// an error; otherwise, we expect to receive a success result
|
||||
const cases = Object.entries(CASES).filter(([key]) => key !== 'HIDDEN');
|
||||
return [
|
||||
...cases.map(([, val]) => ({ ...val, successParam: 'createNewCopies' })),
|
||||
{ ...CASES.HIDDEN, ...fail400() },
|
||||
...Object.entries(CASES).map(([, val]) => ({ ...val, successParam: 'createNewCopies' })),
|
||||
{ ...SPECIAL_TEST_CASES.HIDDEN, ...failUnsupportedType() }, // unsupported_type is an "unresolvable" error
|
||||
// Other special test cases are excluded because they can result in "resolvable" errors that will prevent the rest of the objects from
|
||||
// being created. The test suite assumes that when the createNewCopies option is enabled, all non-error results are actually created,
|
||||
// and it makes assertions based on that.
|
||||
];
|
||||
};
|
||||
|
||||
const createTestCases = (overwrite: boolean, spaceId: string) => {
|
||||
// for each outcome, if failure !== undefined then we expect to receive
|
||||
// for each outcome, if failureType !== undefined then we expect to receive
|
||||
// an error; otherwise, we expect to receive a success result
|
||||
const group1 = [
|
||||
// when overwrite=true, all of the objects in this group are created successfully, so we can check the created object attributes
|
||||
{
|
||||
...CASES.SINGLE_NAMESPACE_DEFAULT_SPACE,
|
||||
...fail409(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...failConflict(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
},
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_1, ...fail409(!overwrite && spaceId === SPACE_1_ID) },
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_2, ...fail409(!overwrite && spaceId === SPACE_2_ID) },
|
||||
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...fail409(!overwrite) },
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_1, ...failConflict(!overwrite && spaceId === SPACE_1_ID) },
|
||||
{ ...CASES.SINGLE_NAMESPACE_SPACE_2, ...failConflict(!overwrite && spaceId === SPACE_2_ID) },
|
||||
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...failConflict(!overwrite) },
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_DEFAULT_AND_SPACE_1,
|
||||
...fail409(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
|
||||
...failConflict(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
|
||||
...destinationId(spaceId !== DEFAULT_SPACE_ID && spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ONLY_SPACE_1,
|
||||
...fail409(!overwrite && spaceId === SPACE_1_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_1_ID),
|
||||
...destinationId(spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ONLY_SPACE_2,
|
||||
...fail409(!overwrite && spaceId === SPACE_2_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_2_ID),
|
||||
...destinationId(spaceId !== SPACE_2_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_DEFAULT_SPACE,
|
||||
...fail409(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...failConflict(!overwrite && spaceId === DEFAULT_SPACE_ID),
|
||||
...destinationId(spaceId !== DEFAULT_SPACE_ID),
|
||||
},
|
||||
{
|
||||
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_SPACE_1,
|
||||
...fail409(!overwrite && spaceId === SPACE_1_ID),
|
||||
...failConflict(!overwrite && spaceId === SPACE_1_ID),
|
||||
...destinationId(spaceId !== SPACE_1_ID),
|
||||
},
|
||||
{ ...CASES.NAMESPACE_AGNOSTIC, ...fail409(!overwrite) },
|
||||
{ ...CASES.HIDDEN, ...fail400() },
|
||||
{ ...CASES.NAMESPACE_AGNOSTIC, ...failConflict(!overwrite) },
|
||||
{ ...SPECIAL_TEST_CASES.HIDDEN, ...failUnsupportedType() },
|
||||
{ ...CASES.CONFLICT_1A_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
|
||||
{ ...CASES.CONFLICT_1B_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
|
||||
{ ...CASES.CONFLICT_3A_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
{ ...CASES.CONFLICT_4_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
{ ...CASES.CONFLICT_3A_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
{ ...CASES.CONFLICT_4_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict
|
||||
CASES.NEW_SINGLE_NAMESPACE_OBJ,
|
||||
CASES.NEW_MULTI_NAMESPACE_OBJ,
|
||||
CASES.NEW_NAMESPACE_AGNOSTIC_OBJ,
|
||||
|
@ -84,17 +88,36 @@ const createTestCases = (overwrite: boolean, spaceId: string) => {
|
|||
const group2 = [
|
||||
// when overwrite=true, all of the objects in this group are errors, so we cannot check the created object attributes
|
||||
// grouping errors together simplifies the test suite code
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...ambiguousConflict('2c') }, // "ambiguous destination" conflict
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...failAmbiguousConflict() }, // "ambiguous destination" conflict
|
||||
];
|
||||
const group3 = [
|
||||
// This group needs to be executed *after* the previous test case, because those error assertions include metadata of the destinations,
|
||||
// and *these* test cases would change that metadata.
|
||||
{ ...CASES.CONFLICT_2A_OBJ, ...failConflict(!overwrite) }, // "exact match" conflict with 2a
|
||||
{
|
||||
// "inexact match" conflict with 2b (since 2a already has a conflict source, this is not an ambiguous destination conflict)
|
||||
...CASES.CONFLICT_2C_OBJ,
|
||||
...failConflict(!overwrite),
|
||||
...destinationId(),
|
||||
expectedNewId: 'conflict_2b',
|
||||
},
|
||||
];
|
||||
const group4 = [
|
||||
// when overwrite=true, all of the objects in this group are created successfully, so we can check the created object attributes
|
||||
{ ...CASES.CONFLICT_1_OBJ, ...fail409(!overwrite) }, // "exact match" conflict
|
||||
{ ...CASES.CONFLICT_1_OBJ, ...failConflict(!overwrite) }, // "exact match" conflict
|
||||
CASES.CONFLICT_1A_OBJ, // no conflict because CONFLICT_1_OBJ is an exact match
|
||||
CASES.CONFLICT_1B_OBJ, // no conflict because CONFLICT_1_OBJ is an exact match
|
||||
{ ...CASES.CONFLICT_2C_OBJ, ...newCopy() }, // "ambiguous source and destination" conflict which results in a new destination ID and empty origin ID
|
||||
{ ...CASES.CONFLICT_2D_OBJ, ...newCopy() }, // "ambiguous source and destination" conflict which results in a new destination ID and empty origin ID
|
||||
];
|
||||
return { group1, group2, group3 };
|
||||
const refOrigins = [
|
||||
// One of these cases will always generate a missing_references error, which is an "unresolvable" error that stops any other objects
|
||||
// from being created in the import. Other test cases can have assertions based on the created objects' attributes when the overwrite
|
||||
// option is enabled, but these test cases are simply asserting pass/fail, so this group needs to be tested separately.
|
||||
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_1_OBJ },
|
||||
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_2_OBJ, ...failMissingReferences() },
|
||||
];
|
||||
return { group1, group2, group3, group4, refOrigins };
|
||||
};
|
||||
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
|
@@ -110,12 +133,18 @@ export default function ({ getService }: FtrProviderContext) {
return createTestDefinitions(cases, false, { createNewCopies, spaceId, singleRequest });
}

const { group1, group2, group3 } = createTestCases(overwrite, spaceId);
return [
const { group1, group2, group3, group4, refOrigins } = createTestCases(overwrite, spaceId);
const tests = [
createTestDefinitions(group1, false, { overwrite, spaceId, singleRequest }),
createTestDefinitions(group2, false, { overwrite, spaceId, singleRequest }),
createTestDefinitions(group3, false, { overwrite, spaceId, singleRequest }),
].flat();
createTestDefinitions(group4, false, { overwrite, spaceId, singleRequest }),
];
if (!overwrite) {
// Only include this group of test cases if the overwrite option is not enabled
tests.push(createTestDefinitions(refOrigins, false, { overwrite, spaceId, singleRequest }));
}
return tests.flat();
};

describe('_import', () => {
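The new failConflict/failUnsupportedType/failMissingReferences helpers used in the groups above replace the generic fail409/fail400 markers. A minimal sketch of their assumed shape follows, mirroring the `destinationId` helper defined later in this file; the real implementations are exported by '../../common/suites/resolve_import_errors' and may carry extra fields, and the failureType strings here are assumptions. The suite's own comment ("if failureType !== undefined then we expect to receive an error") is what makes this pattern work.

// Sketch only (assumed shapes), not the actual exports of the shared suite.
const failConflict = (condition?: boolean) =>
  condition !== false ? { failureType: 'conflict' as const } : {};
const failUnsupportedType = (condition?: boolean) =>
  condition !== false ? { failureType: 'unsupported_type' as const } : {};
const failMissingReferences = (condition?: boolean) =>
  condition !== false ? { failureType: 'missing_references' as const } : {};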
@@ -7,11 +7,13 @@

import { v4 as uuidv4 } from 'uuid';
import { SPACES } from '../../common/lib/spaces';
import { testCaseFailures, getTestScenarios } from '../../common/lib/saved_object_test_utils';
import { getTestScenarios } from '../../common/lib/saved_object_test_utils';
import { FtrProviderContext } from '../../common/ftr_provider_context';
import {
resolveImportErrorsTestSuiteFactory,
resolveImportErrorsTestCaseFailures,
TEST_CASES as CASES,
SPECIAL_TEST_CASES,
} from '../../common/suites/resolve_import_errors';

const {
@@ -19,27 +21,28 @@ const {
SPACE_1: { spaceId: SPACE_1_ID },
SPACE_2: { spaceId: SPACE_2_ID },
} = SPACES;
const { fail400, fail409 } = testCaseFailures;
const { failUnsupportedType, failConflict } = resolveImportErrorsTestCaseFailures;
const destinationId = (condition?: boolean) =>
condition !== false ? { successParam: 'destinationId' } : {};
const newCopy = () => ({ successParam: 'createNewCopy' });

const createNewCopiesTestCases = () => {
// for each outcome, if failure !== undefined then we expect to receive
// for each outcome, if failureType !== undefined then we expect to receive
// an error; otherwise, we expect to receive a success result
const cases = Object.entries(CASES).filter(([key]) => key !== 'HIDDEN');
return [
...cases.map(([, val]) => ({
...Object.entries(CASES).map(([, val]) => ({
...val,
successParam: 'createNewCopies',
expectedNewId: uuidv4(),
})),
{ ...CASES.HIDDEN, ...fail400() },
{ ...SPECIAL_TEST_CASES.HIDDEN, ...failUnsupportedType() }, // unsupported_type is an "unresolvable" error
// Other special test cases are excluded here for simplicity and consistency with the resolveImportErrors "spaces_and_security" test
// suite and the import test suites.
];
};

const createTestCases = (overwrite: boolean, spaceId: string) => {
// for each outcome, if failure !== undefined then we expect to receive
// for each outcome, if failureType !== undefined then we expect to receive
// an error; otherwise, we expect to receive a success result
const singleNamespaceObject =
spaceId === DEFAULT_SPACE_ID
@@ -48,43 +51,45 @@ const createTestCases = (overwrite: boolean, spaceId: string) => {
? CASES.SINGLE_NAMESPACE_SPACE_1
: CASES.SINGLE_NAMESPACE_SPACE_2;
return [
{ ...singleNamespaceObject, ...fail409(!overwrite) },
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...fail409(!overwrite) },
{ ...singleNamespaceObject, ...failConflict(!overwrite) },
{ ...CASES.MULTI_NAMESPACE_ALL_SPACES, ...failConflict(!overwrite) },
{
...CASES.MULTI_NAMESPACE_DEFAULT_AND_SPACE_1,
...fail409(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
...failConflict(!overwrite && (spaceId === DEFAULT_SPACE_ID || spaceId === SPACE_1_ID)),
...destinationId(spaceId !== DEFAULT_SPACE_ID && spaceId !== SPACE_1_ID),
},
{
...CASES.MULTI_NAMESPACE_ONLY_SPACE_1,
...fail409(!overwrite && spaceId === SPACE_1_ID),
...failConflict(!overwrite && spaceId === SPACE_1_ID),
...destinationId(spaceId !== SPACE_1_ID),
},
{
...CASES.MULTI_NAMESPACE_ONLY_SPACE_2,
...fail409(!overwrite && spaceId === SPACE_2_ID),
...failConflict(!overwrite && spaceId === SPACE_2_ID),
...destinationId(spaceId !== SPACE_2_ID),
},
{
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_DEFAULT_SPACE,
...fail409(!overwrite && spaceId === DEFAULT_SPACE_ID),
...failConflict(!overwrite && spaceId === DEFAULT_SPACE_ID),
...destinationId(spaceId !== DEFAULT_SPACE_ID),
},
{
...CASES.MULTI_NAMESPACE_ISOLATED_ONLY_SPACE_1,
...fail409(!overwrite && spaceId === SPACE_1_ID),
...failConflict(!overwrite && spaceId === SPACE_1_ID),
...destinationId(spaceId !== SPACE_1_ID),
},
{ ...CASES.NAMESPACE_AGNOSTIC, ...fail409(!overwrite) },
{ ...CASES.HIDDEN, ...fail400() },
{ ...CASES.NAMESPACE_AGNOSTIC, ...failConflict(!overwrite) },
{ ...SPECIAL_TEST_CASES.HIDDEN, ...failUnsupportedType() },
{ ...CASES.CONFLICT_1A_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
{ ...CASES.CONFLICT_1B_OBJ, ...newCopy() }, // "ambiguous source" conflict which results in a new destination ID and empty origin ID
// all of the cases below represent imports that had an inexact match conflict or an ambiguous conflict
// if we call _resolve_import_errors and don't specify overwrite, each of these will result in a conflict because an object with that
// `expectedDestinationId` already exists
{ ...CASES.CONFLICT_2C_OBJ, ...fail409(!overwrite), ...destinationId() }, // "ambiguous destination" conflict; if overwrite=true, will overwrite 'conflict_2a'
{ ...CASES.CONFLICT_3A_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_3'
{ ...CASES.CONFLICT_4_OBJ, ...fail409(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_4a'
{ ...CASES.CONFLICT_2C_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "ambiguous destination" conflict; if overwrite=true, will overwrite 'conflict_2a'
{ ...CASES.CONFLICT_3A_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_3'
{ ...CASES.CONFLICT_4_OBJ, ...failConflict(!overwrite), ...destinationId() }, // "inexact match" conflict; if overwrite=true, will overwrite 'conflict_4a'
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_1_OBJ },
{ ...SPECIAL_TEST_CASES.OUTBOUND_REFERENCE_ORIGIN_MATCH_2_OBJ },
];
};
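To make the spread-composition pattern above concrete, here is one case unrolled for overwrite=false; the fields contributed by CONFLICT_2C_OBJ itself are assumptions for illustration, only the helper behavior is taken from the code and comments above.

// Sketch only: assumes failConflict/destinationId behave as described in the comments above.
const exampleCase = {
  ...CASES.CONFLICT_2C_OBJ, // base case, e.g. { type: 'sharedtype', id: 'conflict_2c', ... } (assumed shape)
  ...failConflict(true),    // overwrite=false -> expect a "conflict" error for this object
  ...destinationId(),       // when it does succeed (overwrite=true), the result carries a destinationId
};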
@@ -673,12 +673,12 @@
{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1_default",
"id": "sharedtype:conflict_1a_default",
"index": ".kibana",
"source": {
"originId": "conflict_1",
"originId": "conflict_1a",
"sharedtype": {
"title": "A shared saved-object in one space"
"title": "This is used to test an inexact match conflict for an originId -> originId match"
},
"type": "sharedtype",
"namespaces": ["default"],
@@ -691,12 +691,12 @@
{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1_space_1",
"id": "sharedtype:conflict_1a_space_1",
"index": ".kibana",
"source": {
"originId": "conflict_1",
"originId": "conflict_1a",
"sharedtype": {
"title": "A shared saved-object in one space"
"title": "This is used to test an inexact match conflict for an originId -> originId match"
},
"type": "sharedtype",
"namespaces": ["space_1"],
@@ -709,12 +709,100 @@
{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1_space_2",
"id": "sharedtype:conflict_1a_space_2",
"index": ".kibana",
"source": {
"originId": "conflict_1",
"originId": "conflict_1a",
"sharedtype": {
"title": "A shared saved-object in one space"
"title": "This is used to test an inexact match conflict for an originId -> originId match"
},
"type": "sharedtype",
"namespaces": ["space_2"],
"updated_at": "2017-09-21T18:59:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1b_default",
"index": ".kibana",
"source": {
"originId": "conflict_1b_space_2",
"sharedtype": {
"title": "This is used to test an inexact match conflict for an originId -> id match"
},
"type": "sharedtype",
"namespaces": ["default"],
"updated_at": "2017-09-21T18:59:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1b_space_1",
"index": ".kibana",
"source": {
"originId": "conflict_1b_space_2",
"sharedtype": {
"title": "This is used to test an inexact match conflict for an originId -> id match"
},
"type": "sharedtype",
"namespaces": ["space_1"],
"updated_at": "2017-09-21T18:59:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1b_space_2",
"index": ".kibana",
"source": {
"sharedtype": {
"title": "This is used to test an inexact match conflict for an originId -> id match"
},
"type": "sharedtype",
"namespaces": ["space_2"],
"updated_at": "2017-09-21T18:59:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1c_default_and_space_1",
"index": ".kibana",
"source": {
"sharedtype": {
"title": "This is used to test an inexact match conflict for an id -> originId match"
},
"type": "sharedtype",
"namespaces": ["default", "space_1"],
"updated_at": "2017-09-21T18:59:16.270Z"
},
"type": "doc"
}
}

{
"type": "doc",
"value": {
"id": "sharedtype:conflict_1c_space_2",
"index": ".kibana",
"source": {
"originId": "conflict_1c_default_and_space_1",
"sharedtype": {
"title": "This is used to test an inexact match conflict for an id -> originId match"
},
"type": "sharedtype",
"namespaces": ["space_2"],
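For reference when reading the copy-to-space assertions that follow, the three new fixture families above are meant to exercise one flavor of "inexact match" each; the destination ids below are the ones asserted later in this commit.

// (a) conflict_1a_<space>: originId 'conflict_1a' matches the originId of existing doc
//     'conflict_1a_space_2' (originId -> originId match).
// (b) conflict_1b_<space>: originId 'conflict_1b_space_2' matches the *id* of existing doc
//     'conflict_1b_space_2' (originId -> id match).
// (c) conflict_1c_default_and_space_1: its *id* matches the originId of existing doc
//     'conflict_1c_space_2' (id -> originId match).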
@@ -482,7 +482,9 @@ export function copyToSpaceTestSuiteFactory(
const type = 'sharedtype';
const noConflictId = `${spaceId}_only`;
const exactMatchId = 'each_space';
const inexactMatchId = `conflict_1_${spaceId}`;
const inexactMatchIdA = `conflict_1a_${spaceId}`;
const inexactMatchIdB = `conflict_1b_${spaceId}`;
const inexactMatchIdC = `conflict_1c_default_and_space_1`;
const ambiguousConflictId = `conflict_2_${spaceId}`;

const getResult = (response: TestResponse) => (response.body as CopyResponse).space_2;
@@ -560,22 +562,24 @@ export function copyToSpaceTestSuiteFactory(
},
},
{
testTitle: 'copying with an inexact match conflict',
objects: [{ type, id: inexactMatchId }],
testTitle:
'copying with an inexact match conflict (a) - originId matches existing originId',
objects: [{ type, id: inexactMatchIdA }],
statusCode,
response: async (response: TestResponse) => {
if (outcome === 'authorized') {
const { success, successCount, successResults, errors } = getResult(response);
const title = 'A shared saved-object in one space';
const title =
'This is used to test an inexact match conflict for an originId -> originId match';
const meta = { title, icon: 'beaker' };
const destinationId = 'conflict_1_space_2';
const destinationId = 'conflict_1a_space_2';
if (createNewCopies) {
expectNewCopyResponse(response, inexactMatchId, title);
expectNewCopyResponse(response, inexactMatchIdA, title);
} else if (overwrite) {
expect(success).to.eql(true);
expect(successCount).to.eql(1);
expect(successResults).to.eql([
{ type, id: inexactMatchId, meta, overwrite: true, destinationId },
{ type, id: inexactMatchIdA, meta, overwrite: true, destinationId },
]);
expect(errors).to.be(undefined);
} else {
@@ -586,7 +590,91 @@ export function copyToSpaceTestSuiteFactory(
{
error: { type: 'conflict', destinationId },
type,
id: inexactMatchId,
id: inexactMatchIdA,
title,
meta,
},
]);
}
} else if (outcome === 'noAccess') {
expectRouteForbiddenResponse(response);
} else {
// unauthorized read/write
expectSavedObjectForbiddenResponse(response);
}
},
},
{
testTitle: 'copying with an inexact match conflict (b) - originId matches existing id',
objects: [{ type, id: inexactMatchIdB }],
statusCode,
response: async (response: TestResponse) => {
if (outcome === 'authorized') {
const { success, successCount, successResults, errors } = getResult(response);
const title =
'This is used to test an inexact match conflict for an originId -> id match';
const meta = { title, icon: 'beaker' };
const destinationId = 'conflict_1b_space_2';
if (createNewCopies) {
expectNewCopyResponse(response, inexactMatchIdB, title);
} else if (overwrite) {
expect(success).to.eql(true);
expect(successCount).to.eql(1);
expect(successResults).to.eql([
{ type, id: inexactMatchIdB, meta, overwrite: true, destinationId },
]);
expect(errors).to.be(undefined);
} else {
expect(success).to.eql(false);
expect(successCount).to.eql(0);
expect(successResults).to.be(undefined);
expect(errors).to.eql([
{
error: { type: 'conflict', destinationId },
type,
id: inexactMatchIdB,
title,
meta,
},
]);
}
} else if (outcome === 'noAccess') {
expectRouteForbiddenResponse(response);
} else {
// unauthorized read/write
expectSavedObjectForbiddenResponse(response);
}
},
},
{
testTitle: 'copying with an inexact match conflict (c) - id matches existing originId',
objects: [{ type, id: inexactMatchIdC }],
statusCode,
response: async (response: TestResponse) => {
if (outcome === 'authorized') {
const { success, successCount, successResults, errors } = getResult(response);
const title =
'This is used to test an inexact match conflict for an id -> originId match';
const meta = { title, icon: 'beaker' };
const destinationId = 'conflict_1c_space_2';
if (createNewCopies) {
expectNewCopyResponse(response, inexactMatchIdC, title);
} else if (overwrite) {
expect(success).to.eql(true);
expect(successCount).to.eql(1);
expect(successResults).to.eql([
{ type, id: inexactMatchIdC, meta, overwrite: true, destinationId },
]);
expect(errors).to.be(undefined);
} else {
expect(success).to.eql(false);
expect(successCount).to.eql(0);
expect(successResults).to.be(undefined);
expect(errors).to.eql([
{
error: { type: 'conflict', destinationId },
type,
id: inexactMatchIdC,
title,
meta,
},
@@ -101,7 +101,7 @@ export function deleteTestSuiteFactory(es: Client, esArchiver: any, supertest: S

expect(buckets).to.eql(expectedBuckets);

// There were 15 multi-namespace objects.
// There were 22 multi-namespace objects.
// Since Space 2 was deleted, any multi-namespace objects that existed in that space
// are updated to remove it, and of those, any that don't exist in any space are deleted.
const multiNamespaceResponse = await es.search<Record<string, any>>({
@@ -110,8 +110,8 @@ export function deleteTestSuiteFactory(es: Client, esArchiver: any, supertest: S
body: { query: { terms: { type: ['sharedtype'] } } },
});
const docs = multiNamespaceResponse.hits.hits;
// Just 14 results, since spaces_2_only, conflict_1_space_2 and conflict_2_space_2 got deleted.
expect(docs).length(14);
// Just 17 results, since spaces_2_only, conflict_1a_space_2, conflict_1b_space_2, conflict_1c_space_2, and conflict_2_space_2 got deleted.
expect(docs).length(17);
docs.forEach((doc) => () => {
const containsSpace2 = doc?._source?.namespaces.includes('space_2');
expect(containsSpace2).to.eql(false);
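A quick check of the updated counts in the delete suite above, using the fixture changes in this commit: 22 multi-namespace objects exist before Space 2 is deleted, and the five objects named in the comment are the ones that existed only in that space, so they are removed along with it, leaving 22 - 5 = 17 documents for the search assertion.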
@@ -323,7 +323,9 @@ export function resolveCopyToSpaceConflictsSuite(
const statusCode = outcome === 'noAccess' ? 403 : 200;
const type = 'sharedtype';
const exactMatchId = 'each_space';
const inexactMatchId = `conflict_1_${spaceId}`;
const inexactMatchIdA = `conflict_1a_${spaceId}`;
const inexactMatchIdB = `conflict_1b_${spaceId}`;
const inexactMatchIdC = `conflict_1c_default_and_space_1`;
const ambiguousConflictId = `conflict_2_${spaceId}`;

const createRetries = (overwriteRetry: Record<string, any>) => ({
@@ -350,10 +352,20 @@ export function resolveCopyToSpaceConflictsSuite(
expect(success).to.eql(true);
expect(successCount).to.eql(1);
expect(errors).to.be(undefined);
const title =
id === exactMatchId
? 'A shared saved-object in the default, space_1, and space_2 spaces'
: 'A shared saved-object in one space';
const title = (() => {
switch (id) {
case exactMatchId:
return 'A shared saved-object in the default, space_1, and space_2 spaces';
case inexactMatchIdA:
return 'This is used to test an inexact match conflict for an originId -> originId match';
case inexactMatchIdB:
return 'This is used to test an inexact match conflict for an originId -> id match';
case inexactMatchIdC:
return 'This is used to test an inexact match conflict for an id -> originId match';
default:
return 'A shared saved-object in one space';
}
})();
const meta = { title, icon: 'beaker' };
expect(successResults).to.eql([
{ type, id, meta, overwrite: true, ...(destinationId && { destinationId }) },
@@ -378,18 +390,61 @@ export function resolveCopyToSpaceConflictsSuite(
},
},
{
testTitle: 'copying with an inexact match conflict',
objects: [{ type, id: inexactMatchId }],
testTitle:
'copying with an inexact match conflict (a) - originId matches existing originId',
objects: [{ type, id: inexactMatchIdA }],
retries: createRetries({
type,
id: inexactMatchId,
id: inexactMatchIdA,
overwrite: true,
destinationId: 'conflict_1_space_2',
destinationId: 'conflict_1a_space_2',
}),
statusCode,
response: async (response: TestResponse) => {
if (outcome === 'authorized') {
expectSavedObjectSuccessResponse(response, inexactMatchId, 'conflict_1_space_2');
expectSavedObjectSuccessResponse(response, inexactMatchIdA, 'conflict_1a_space_2');
} else if (outcome === 'noAccess') {
expectRouteForbiddenResponse(response);
} else {
// unauthorized read/write
expectSavedObjectForbiddenResponse(response);
}
},
},
{
testTitle: 'copying with an inexact match conflict (b) - originId matches existing id',
objects: [{ type, id: inexactMatchIdB }],
retries: createRetries({
type,
id: inexactMatchIdB,
overwrite: true,
destinationId: 'conflict_1b_space_2',
}),
statusCode,
response: async (response: TestResponse) => {
if (outcome === 'authorized') {
expectSavedObjectSuccessResponse(response, inexactMatchIdB, 'conflict_1b_space_2');
} else if (outcome === 'noAccess') {
expectRouteForbiddenResponse(response);
} else {
// unauthorized read/write
expectSavedObjectForbiddenResponse(response);
}
},
},
{
testTitle: 'copying with an inexact match conflict (c) - id matches existing originId',
objects: [{ type, id: inexactMatchIdC }],
retries: createRetries({
type,
id: inexactMatchIdC,
overwrite: true,
destinationId: 'conflict_1c_space_2',
}),
statusCode,
response: async (response: TestResponse) => {
if (outcome === 'authorized') {
expectSavedObjectSuccessResponse(response, inexactMatchIdC, 'conflict_1c_space_2');
} else if (outcome === 'noAccess') {
expectRouteForbiddenResponse(response);
} else {