[8.x] [Migrations] Backport ALL updates to integration tests (#194013)

# Backport

This will backport the following PRs from `main` to `8.x`:
 - https://github.com/elastic/kibana/pull/193328
 - https://github.com/elastic/kibana/pull/193856
 - https://github.com/elastic/kibana/pull/193696
 - https://github.com/elastic/kibana/pull/194151

Gerard Soldevila 2024-09-27 10:06:10 +02:00 committed by GitHub
parent ed3b6102fc
commit c7b29d18dd
49 changed files with 1864 additions and 3297 deletions


@@ -120,10 +120,7 @@ If you fail to remedy this, your upgrade to 8.0+ will fail with a message like:
 [source,sh]
 --------------------------------------------
-Unable to complete saved object migrations for the [.kibana] index: Migration failed because some documents were found which use unknown saved object types:
-- "firstDocId" (type "someType")
-- "secondDocId" (type "someType")
-- "thirdDocId" (type "someOtherType")
+Unable to complete saved object migrations for the [.kibana] index: Migration failed because some documents were found which use unknown saved object types: someType,someOtherType
 To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.
 --------------------------------------------
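
The reworked message no longer lists every offending document, only the distinct types. To unblock the upgrade, the migration can be told to drop those documents instead. A minimal sketch, mirroring the `createRootWithCorePlugins` settings used by the integration tests further down (`currentVersion` is assumed to be the exact version of the running Kibana):

```ts
import { createRootWithCorePlugins } from '@kbn/core-test-helpers-kbn-server';

// Sketch only: `migrations.discardUnknownObjects` must equal the version being
// migrated to, acting as an explicit confirmation that discarding is intentional.
const root = createRootWithCorePlugins({
  migrations: {
    discardUnknownObjects: currentVersion,
  },
});
```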


@@ -29,10 +29,7 @@ describe('extractUnknownDocFailureReason', () => {
       },
     ])
   ).toMatchInlineSnapshot(`
-"Migration failed because some documents were found which use unknown saved object types:
-- \\"unknownType:12\\" (type: \\"unknownType\\")
-- \\"anotherUnknownType:42\\" (type: \\"anotherUnknownType\\")
+"Migration failed because some documents were found which use unknown saved object types: unknownType,anotherUnknownType
 To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.
 Please refer to some-url.co for more information."
   `);


@@ -52,10 +52,12 @@ export function extractUnknownDocFailureReason(
   resolveMigrationFailuresUrl: string,
   unknownDocs: DocumentIdAndType[]
 ): string {
+  const typesSet = new Set(unknownDocs.map(({ type }) => type));
   return (
-    `Migration failed because some documents were found which use unknown saved object types:\n` +
-    unknownDocs.map((doc) => `- "${doc.id}" (type: "${doc.type}")\n`).join('') +
-    `\nTo proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.\n` +
+    `Migration failed because some documents were found which use unknown saved object types: ${Array.from(
+      typesSet.values()
+    )}\n` +
+    `To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.\n` +
     `Please refer to ${resolveMigrationFailuresUrl} for more information.`
   );
 }
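
A hedged sketch of the reworked helper's behavior (assuming `DocumentIdAndType` is `{ id: string; type: string }`, as the test fixtures above suggest). Duplicate types collapse into the Set, and interpolating the array directly into the template literal joins the remaining types with commas and no spaces:

```ts
const reason = extractUnknownDocFailureReason('https://some-url.co', [
  { id: 'unknownType:12', type: 'unknownType' },
  { id: 'unknownType:13', type: 'unknownType' }, // same type: reported only once
  { id: 'anotherUnknownType:42', type: 'anotherUnknownType' },
]);
// reason ===
//   'Migration failed because some documents were found which use unknown saved object types: unknownType,anotherUnknownType\n' +
//   'To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.\n' +
//   'Please refer to https://some-url.co for more information.'
```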


@@ -559,13 +559,21 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
         deleteByQueryTaskId: res.right.taskId,
       };
     } else {
+      const reason = extractUnknownDocFailureReason(
+        stateP.migrationDocLinks.resolveMigrationFailures,
+        res.left.unknownDocs
+      );
       return {
         ...stateP,
         controlState: 'FATAL',
-        reason: extractUnknownDocFailureReason(
-          stateP.migrationDocLinks.resolveMigrationFailures,
-          res.left.unknownDocs
-        ),
+        reason,
+        logs: [
+          ...logs,
+          {
+            level: 'error',
+            message: reason,
+          },
+        ],
       };
     }
   } else if (stateP.controlState === 'CLEANUP_UNKNOWN_AND_EXCLUDED_WAIT_FOR_TASK') {
@@ -700,13 +708,22 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
     if (isTypeof(res.right, 'unknown_docs_found')) {
       if (!stateP.discardUnknownObjects) {
+        const reason = extractUnknownDocFailureReason(
+          stateP.migrationDocLinks.resolveMigrationFailures,
+          res.right.unknownDocs
+        );
         return {
           ...stateP,
           controlState: 'FATAL',
-          reason: extractUnknownDocFailureReason(
-            stateP.migrationDocLinks.resolveMigrationFailures,
-            res.right.unknownDocs
-          ),
+          reason,
+          logs: [
+            ...logs,
+            {
+              level: 'error',
+              message: reason,
+            },
+          ],
         };
       }
@@ -879,6 +896,13 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
       corruptDocumentIds: [],
       transformErrors: [],
       progress: createInitialProgress(),
+      logs: [
+        ...logs,
+        {
+          level: 'info',
+          message: `REINDEX_SOURCE_TO_TEMP_OPEN_PIT PitId:${res.right.pitId}`,
+        },
+      ],
     };
   } else {
     throwBadResponse(stateP, res);
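
Both FATAL branches now follow the same pattern: compute the failure reason once, then reuse it both as the terminal state's `reason` and as an error-level entry appended to the migration logs, so the cause is persisted to Kibana's log file instead of surfacing only in the thrown error. A condensed sketch of the shape:

```ts
const reason = extractUnknownDocFailureReason(
  stateP.migrationDocLinks.resolveMigrationFailures,
  unknownDocs // res.left.unknownDocs or res.right.unknownDocs, depending on the branch
);
return {
  ...stateP,
  controlState: 'FATAL',
  reason,
  logs: [...logs, { level: 'error', message: reason }],
};
```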


@@ -71,6 +71,7 @@ export interface CreateTestEsClusterOptions {
    * `['key.1=val1', 'key.2=val2']`
    */
   esArgs?: string[];
+  esVersion?: string;
   esFrom?: string;
   esServerlessOptions?: Pick<
     ServerlessOptions,
@@ -169,6 +170,7 @@ export function createTestEsCluster<
     log,
     writeLogsToPath,
     basePath = Path.resolve(REPO_ROOT, '.es'),
+    esVersion = esTestConfig.getVersion(),
     esFrom = esTestConfig.getBuildFrom(),
     esServerlessOptions,
     dataArchive,
@@ -199,7 +201,7 @@
   const esArgs = assignArgs(defaultEsArgs, customEsArgs);

   const config = {
-    version: esTestConfig.getVersion(),
+    version: esVersion,
     installPath: Path.resolve(basePath, clusterName),
     sourcePath: Path.resolve(REPO_ROOT, '../elasticsearch'),
     password,
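
The new `esVersion` option lets a test start an Elasticsearch node for a version other than the default `esTestConfig.getVersion()`, which is presumably what allows the reworked integration tests to exercise real upgrade paths without archived data. A hedged usage sketch (the import path and `start`/`stop` lifecycle are assumed from typical kbn-es test cluster usage; the version string is illustrative):

```ts
import { createTestEsCluster } from '@kbn/test';

const cluster = createTestEsCluster({
  log,
  esVersion: '8.15.0', // hypothetical; falls back to esTestConfig.getVersion() when omitted
});
await cluster.start();
// ...start Kibana against this node and assert on migration behavior...
await cluster.stop();
```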


@@ -1,132 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import path from 'path';
import { unlink } from 'fs/promises';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import type { InternalCoreStart } from '@kbn/core-lifecycle-server-internal';
import { Root } from '@kbn/core-root-server-internal';
import {
createTestServers,
createRootWithCorePlugins,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import { ALL_SAVED_OBJECT_INDICES } from '@kbn/core-saved-objects-server';
const logFilePath = path.join(__dirname, '7.7.2_xpack_100k.log');
async function removeLogFile() {
// ignore errors if it doesn't exist
await unlink(logFilePath).catch(() => void 0);
}
/** Number of SO documents dropped during the migration because they belong to an unused type */
const UNUSED_SO_COUNT = 5;
describe('migration from 7.7.2-xpack with 100k objects', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
let coreStart: InternalCoreStart;
let esClient: ElasticsearchClient;
beforeEach(() => {
jest.setTimeout(600000);
});
const startServers = async ({ dataArchive, oss }: { dataArchive: string; oss: boolean }) => {
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(600000),
settings: {
es: {
license: 'trial',
dataArchive,
},
},
});
root = createRootWithCorePlugins(
{
migrations: {
skip: false,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss,
}
);
const startEsPromise = startES().then((es) => (esServer = es));
const startKibanaPromise = root
.preboot()
.then(() => root.setup())
.then(() => root.start())
.then((start) => {
coreStart = start;
esClient = coreStart.elasticsearch.client.asInternalUser;
});
await Promise.all([startEsPromise, startKibanaPromise]);
};
const stopServers = async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
await new Promise((resolve) => setTimeout(resolve, 20000));
};
beforeAll(async () => {
await removeLogFile();
await startServers({
oss: false,
dataArchive: path.join(__dirname, '..', 'archives', '7.7.2_xpack_100k_obj.zip'),
});
});
afterAll(async () => {
await stopServers();
});
it('copies all the documents of the previous index to the new one', async () => {
const migratedIndexResponse = await esClient.count({
index: ALL_SAVED_OBJECT_INDICES,
});
const oldIndexResponse = await esClient.count({
index: '.kibana_1',
});
// Use a >= comparison since once Kibana has started it might create new
// documents like telemetry tasks
expect(migratedIndexResponse.count).toBeGreaterThanOrEqual(
oldIndexResponse.count - UNUSED_SO_COUNT
);
});
});


@@ -1,158 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import fs from 'fs/promises';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Root } from '@kbn/core-root-server-internal';
import {
createRootWithCorePlugins,
createTestServers,
TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
const logFilePath = Path.join(__dirname, '7_13_failed_action_tasks.log');
async function removeLogFile() {
// ignore errors if it doesn't exist
await fs.unlink(logFilePath).catch(() => void 0);
}
describe('migration from 7.13 to 7.14+ with many failed action_tasks', () => {
describe('if mappings are incompatible (reindex required)', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
let startES: () => Promise<TestElasticsearchUtils>;
beforeAll(async () => {
await removeLogFile();
});
beforeEach(() => {
({ startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
dataArchive: Path.join(
__dirname,
'..',
'archives',
'7.13_1.5k_failed_action_tasks.zip'
),
},
},
}));
});
afterEach(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
const getCounts = async (
kibanaIndexName = '.kibana',
taskManagerIndexName = '.kibana_task_manager'
): Promise<{ tasksCount: number; actionTaskParamsCount: number }> => {
const esClient: ElasticsearchClient = esServer.es.getClient();
const actionTaskParamsResponse = await esClient.count({
index: kibanaIndexName,
body: {
query: {
bool: { must: { term: { type: 'action_task_params' } } },
},
},
});
const tasksResponse = await esClient.count({
index: taskManagerIndexName,
body: {
query: {
bool: { must: { term: { type: 'task' } } },
},
},
});
return {
actionTaskParamsCount: actionTaskParamsResponse.count,
tasksCount: tasksResponse.count,
};
};
it('filters out all outdated action_task_params and action tasks', async () => {
esServer = await startES();
// Verify counts in current index before migration starts
expect(await getCounts()).toEqual({
actionTaskParamsCount: 2010,
tasksCount: 2020,
});
root = createRoot();
await root.preboot();
await root.setup();
await root.start();
// Bulk of tasks should have been filtered out of current index
const { actionTaskParamsCount, tasksCount } = await getCounts();
// Use toBeLessThan to avoid flakiness in the case that TM starts manipulating docs before the counts are taken
expect(actionTaskParamsCount).toBeLessThan(1000);
expect(tasksCount).toBeLessThan(1000);
const {
actionTaskParamsCount: oldIndexActionTaskParamsCount,
tasksCount: oldIndexTasksCount,
} = await getCounts('.kibana_7.13.5_001', '.kibana_task_manager_7.13.5_001');
// .kibana mappings changes are NOT compatible, we reindex and preserve old index's documents
expect(oldIndexActionTaskParamsCount).toEqual(2010);
// ATM .kibana_task_manager mappings changes are compatible, we skip reindex and actively delete unwanted documents
// if the mappings become incompatible in the future, we will reindex and the old index must still contain all 2020 docs
// if the mappings remain compatible, we reuse the existing index and actively delete unwanted documents from it
expect(oldIndexTasksCount === 2020 || oldIndexTasksCount < 1000).toEqual(true);
});
});
});
function createRoot() {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
batchSize: 250,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss: false,
}
);
}


@@ -1,278 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import { Env } from '@kbn/config';
import { REPO_ROOT } from '@kbn/repo-info';
import { getEnvOptions } from '@kbn/config-mocks';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Root } from '@kbn/core-root-server-internal';
import { SearchTotalHits } from '@elastic/elasticsearch/lib/api/types';
import { getMigrationDocLink } from '../test_utils';
import {
createRootWithCorePlugins,
TestElasticsearchUtils,
createTestServers as createkbnServerTestServers,
} from '@kbn/core-test-helpers-kbn-server';
import {
MAIN_SAVED_OBJECT_INDEX,
TASK_MANAGER_SAVED_OBJECT_INDEX,
ANALYTICS_SAVED_OBJECT_INDEX,
} from '@kbn/core-saved-objects-server';
const migrationDocLink = getMigrationDocLink().resolveMigrationFailures;
const logFilePath = Path.join(__dirname, '7_13_corrupt_transform_failures.log');
const asyncUnlink = Util.promisify(Fs.unlink);
async function removeLogFile() {
// ignore errors if it doesn't exist
await asyncUnlink(logFilePath).catch(() => void 0);
}
describe('migration v2', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
beforeAll(async () => {
await removeLogFile();
});
afterEach(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
describe('when `migrations.discardCorruptObjects` does not match current kibana version', () => {
it('fails to migrate when corrupt objects and transform errors are encountered', async () => {
const { startES } = createTestServers();
root = createRoot();
esServer = await startES();
await rootPrebootAndSetup(root);
try {
await root.start();
} catch (err) {
const errorMessage = err.message;
const errorLines = errorMessage.split('\n');
const errorMessageWithoutStack = errorLines
.filter((line: string) => !line.includes(' at '))
.join('\n');
expect(errorMessageWithoutStack).toMatchInlineSnapshot(`
"Unable to complete saved object migrations for the [.kibana] index: Migrations failed. Reason: 7 corrupt saved object documents were found: P2SQfHkBs3dBRGh--No5, QGSZfHkBs3dBRGh-ANoD, QWSZfHkBs3dBRGh-hNob, QmSZfHkBs3dBRGh-w9qH, one, two, Q2SZfHkBs3dBRGh-9dp2
7 transformation errors were encountered:
- space:default: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
- space:first: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
- space:forth: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
- space:second: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
- space:fifth: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
- space:third: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
- space:sixth: Error: Migration function for version 7.14.0 threw an error
Caused by:
TypeError: Cannot set properties of undefined (setting 'bar')
To allow migrations to proceed, please delete or fix these documents.
Note that you can configure Kibana to automatically discard corrupt documents and transform errors for this migration.
Please refer to ${migrationDocLink} for more information."
`);
return;
}
// Fail test if above expression doesn't throw anything.
expect('to throw').toBe('but did not');
});
});
describe('when `migrations.discardCorruptObjects` matches current kibana version', () => {
it('proceeds with the migration, ignoring corrupt objects and transform errors', async () => {
const { startES } = createTestServers();
const currentVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
root = createRoot(currentVersion);
esServer = await startES();
await rootPrebootAndSetup(root);
await expect(root.start()).resolves.not.toThrowError();
// TODO check that the destination indices contain data, but NOT the conflicting objects
const esClient: ElasticsearchClient = esServer.es.getClient();
const docs = await esClient.search({
index: [
MAIN_SAVED_OBJECT_INDEX,
TASK_MANAGER_SAVED_OBJECT_INDEX,
ANALYTICS_SAVED_OBJECT_INDEX,
],
_source: false,
fields: ['_id'],
size: 50,
});
// 34 saved objects (11 tasks + 23 misc) + 14 corrupt (discarded) = 48 total in the old indices
expect((docs.hits.total as SearchTotalHits).value).toEqual(34);
expect(docs.hits.hits.map(({ _id }) => _id).sort()).toEqual([
'config:7.13.0',
'index-pattern:logs-*',
'index-pattern:metrics-*',
'task:Actions-actions_telemetry',
'task:Actions-cleanup_failed_action_executions',
'task:Alerting-alerting_health_check',
'task:Alerting-alerting_telemetry',
'task:Alerts-alerts_invalidate_api_keys',
'task:Lens-lens_telemetry',
'task:apm-telemetry-task',
'task:data_enhanced_search_sessions_monitor',
'task:endpoint:user-artifact-packager:1.0.0',
'task:security:endpoint-diagnostics:1.0.0',
'task:session_cleanup',
'ui-metric:console:DELETE_delete',
'ui-metric:console:GET_get',
'ui-metric:console:GET_search',
'ui-metric:console:POST_delete_by_query',
'ui-metric:console:POST_index',
'ui-metric:console:PUT_indices.put_mapping',
'usage-counters:uiCounter:21052021:click:global_search_bar:user_navigated_to_application',
'usage-counters:uiCounter:21052021:click:global_search_bar:user_navigated_to_application_unknown',
'usage-counters:uiCounter:21052021:count:console:DELETE_delete',
'usage-counters:uiCounter:21052021:count:console:GET_cat.aliases',
'usage-counters:uiCounter:21052021:count:console:GET_cat.indices',
'usage-counters:uiCounter:21052021:count:console:GET_get',
'usage-counters:uiCounter:21052021:count:console:GET_search',
'usage-counters:uiCounter:21052021:count:console:POST_delete_by_query',
'usage-counters:uiCounter:21052021:count:console:POST_index',
'usage-counters:uiCounter:21052021:count:console:PUT_indices.put_mapping',
'usage-counters:uiCounter:21052021:count:global_search_bar:search_focus',
'usage-counters:uiCounter:21052021:count:global_search_bar:search_request',
'usage-counters:uiCounter:21052021:count:global_search_bar:shortcut_used',
'usage-counters:uiCounter:21052021:loaded:console:opened_app',
]);
});
});
});
function createTestServers() {
return createkbnServerTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// example of original 'foo' SO with corrupt id:
// _id: one
// {
// foo: {
// name: 'one',
// },
// type: 'foo',
// references: [],
// migrationVersion: {
// foo: '7.13.0',
// },
// "coreMigrationVersion": "7.13.0",
// "updated_at": "2021-05-16T18:16:45.450Z"
// },
// SO that will fail transformation:
// {
// type: 'space',
// space: {},
// },
//
//
dataArchive: Path.join(
__dirname,
'..',
'archives',
'7_13_corrupt_and_transform_failures_docs.zip'
),
},
},
});
}
function createRoot(discardCorruptObjects?: string) {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
batchSize: 5,
discardCorruptObjects,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss: true,
}
);
}
async function rootPrebootAndSetup(root: Root) {
await root.preboot();
const coreSetup = await root.setup();
coreSetup.savedObjects.registerType({
name: 'foo',
hidden: false,
mappings: {
properties: {},
},
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
// registering the `space` type with a throwing migration fn to avoid the migration failing for unknown types
coreSetup.savedObjects.registerType({
name: 'space',
hidden: false,
mappings: {
properties: {},
},
namespaceType: 'single',
migrations: {
'7.14.0': (doc) => {
doc.attributes.foo.bar = 12;
return doc;
},
},
});
}


@@ -1,165 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import fs from 'fs/promises';
import type { IndicesIndexSettings } from '@elastic/elasticsearch/lib/api/types';
import { Env } from '@kbn/config';
import { REPO_ROOT } from '@kbn/repo-info';
import { getEnvOptions } from '@kbn/config-mocks';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Root } from '@kbn/core-root-server-internal';
import {
createRootWithCorePlugins,
createTestServers,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
const logFilePath = Path.join(__dirname, '7_13_unknown_types.log');
async function removeLogFile() {
// ignore errors if it doesn't exist
await fs.unlink(logFilePath).catch(() => void 0);
}
describe('migration v2', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
let startES: () => Promise<TestElasticsearchUtils>;
beforeAll(async () => {
await removeLogFile();
});
beforeEach(() => {
({ startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// dataset contains 2 types of unknown docs
// `foo` documents
// `space` documents (to mimic a migration with disabled plugins)
dataArchive: Path.join(__dirname, '..', 'archives', '7.13.0_with_unknown_so.zip'),
},
},
}));
});
afterEach(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
describe('when `discardUnknownObjects` does not match current kibana version', () => {
it('fails the migration if unknown types are found in the source index', async () => {
// Start kibana with foo and space types disabled
root = createRoot('7.13.0');
esServer = await startES();
await root.preboot();
await root.setup();
try {
await root.start();
expect('should have thrown').toEqual('but it did not');
} catch (err) {
const errorMessage = err.message;
expect(
errorMessage.startsWith(
'Unable to complete saved object migrations for the [.kibana] index: Migration failed because some documents ' +
'were found which use unknown saved object types:'
)
).toBeTruthy();
const unknownDocs = [
{ type: 'space', id: 'space:default' },
{ type: 'space', id: 'space:first' },
{ type: 'space', id: 'space:second' },
{ type: 'space', id: 'space:third' },
{ type: 'space', id: 'space:forth' },
{ type: 'space', id: 'space:fifth' },
{ type: 'space', id: 'space:sixth' },
{ type: 'foo', id: 'P2SQfHkBs3dBRGh--No5' },
{ type: 'foo', id: 'QGSZfHkBs3dBRGh-ANoD' },
{ type: 'foo', id: 'QWSZfHkBs3dBRGh-hNob' },
];
unknownDocs.forEach(({ id, type }) => {
expect(errorMessage).toEqual(expect.stringContaining(`- "${id}" (type: "${type}")`));
});
const client = esServer.es.getClient();
const { body: response } = await client.indices.getSettings(
{ index: '.kibana_7.13.0_001' },
{ meta: true }
);
const settings = response['.kibana_7.13.0_001'].settings as IndicesIndexSettings;
expect(settings.index).not.toBeUndefined();
expect(settings.index!.blocks?.write).not.toEqual('true');
}
});
});
describe('when `discardUnknownObjects` matches current kibana version', () => {
const currentVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
it('discards the documents with unknown types and finishes the migration successfully', async () => {
// Start kibana with foo and space types disabled
root = createRoot(currentVersion);
esServer = await startES();
await root.preboot();
await root.setup();
// the migration process should finish successfully
await expect(root.start()).resolves.not.toThrowError();
const esClient: ElasticsearchClient = esServer.es.getClient();
const body = await esClient.count({ q: 'type:foo|space' });
expect(body.count).toEqual(0);
});
});
});
function createRoot(discardUnknownObjects?: string) {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
batchSize: 5,
discardUnknownObjects,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss: true,
}
);
}


@@ -0,0 +1,504 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`v2 migration to a newer stack version with transform errors collects corrupt saved object documents across batches 1`] = `
"Error: Cannot convert 'complex' objects with values that are multiple of 100 4a5f8a3b-1fd7-4e85-930c-325f674fc67a
Error: Cannot convert 'complex' objects with values that are multiple of 100 1aea9914-5aaa-4e03-ac9f-54858ef7f24f
Error: Cannot convert 'complex' objects with values that are multiple of 100 6b99e3ca-b047-4733-af65-dcf92b7053a7
Error: Cannot convert 'complex' objects with values that are multiple of 100 e455a737-297f-49ec-abd4-cfb6e97371b1
Error: Cannot convert 'complex' objects with values that are multiple of 100 9fcb44b8-402f-4ccd-8a2b-18ffb6cc2a3f
Error: Cannot convert 'complex' objects with values that are multiple of 100 1784ecc3-5037-4526-a104-861e6e63a33e
Error: Cannot convert 'complex' objects with values that are multiple of 100 9096d85e-27bf-4219-90ea-2d5dbbe2b349
Error: Cannot convert 'complex' objects with values that are multiple of 100 de7b3476-dd21-4bf4-bdde-a3487936525f
Error: Cannot convert 'complex' objects with values that are multiple of 100 85f734f3-7546-484b-8938-988f9acc8f57
Error: Cannot convert 'complex' objects with values that are multiple of 100 79449e52-898c-4bdd-a9a7-bf5d5ccebab9
Error: Cannot convert 'complex' objects with values that are multiple of 100 dc86ccfb-af11-4bb8-8391-58f46c83653d
Error: Cannot convert 'complex' objects with values that are multiple of 100 ec43e7df-7f38-4daa-a30e-3f21c4f5afb6
Error: Cannot convert 'complex' objects with values that are multiple of 100 c060e0dc-0087-447a-96bf-c95e183163d0
Error: Cannot convert 'complex' objects with values that are multiple of 100 7b80ca14-ae5e-4638-8fae-74b6cd500525
Error: Cannot convert 'complex' objects with values that are multiple of 100 4179189d-c357-4427-b794-9e0a6ceaf394
Error: Cannot convert 'complex' objects with values that are multiple of 100 cc5c43ee-82d2-4fd3-b148-33f15358ba9d
Error: Cannot convert 'complex' objects with values that are multiple of 100 3368e24c-e15e-403c-9166-2679143dbb0b
Error: Cannot convert 'complex' objects with values that are multiple of 100 e333f8bb-15c8-4a48-a492-22761e05a433
Error: Cannot convert 'complex' objects with values that are multiple of 100 2d3c2c02-e43c-4ac0-bb0a-e89155fc4b70
Error: Cannot convert 'complex' objects with values that are multiple of 100 c5464163-96ef-43b0-a68d-501c01423cf5
Error: Cannot convert 'complex' objects with values that are multiple of 100 85eb647d-5920-4b3f-9cfc-668f50f0f03c
Error: Cannot convert 'complex' objects with values that are multiple of 100 e45666bd-bd2b-4c70-9bfe-6136d204fb87
Error: Cannot convert 'complex' objects with values that are multiple of 100 e6b85add-9751-4b0f-9f4e-ae6ccb3d2aff
Error: Cannot convert 'complex' objects with values that are multiple of 100 25ad5047-ad2d-48a7-99ca-5c719e885030
Error: Cannot convert 'complex' objects with values that are multiple of 100 b7851f4e-4dcf-4079-b85b-e8042a771391
Error: Cannot convert 'complex' objects with values that are multiple of 100 0d1c187c-e662-42d0-8a33-20c22e1a7a05
Error: Cannot convert 'complex' objects with values that are multiple of 100 9ac865a1-8077-4adb-8dc2-801f853a6382
Error: Cannot convert 'complex' objects with values that are multiple of 100 a42cb68e-724c-4e72-b0f6-bcb5e254c4b7
Error: Cannot convert 'complex' objects with values that are multiple of 100 3806cf94-a766-4995-8e69-54c12f16b10b
Error: Cannot convert 'complex' objects with values that are multiple of 100 f75edb1f-290c-4a71-9706-bd113f0e5ee2
Error: Cannot convert 'complex' objects with values that are multiple of 100 12e2e42d-939b-4e00-8f84-9fedea65a348
Error: Cannot convert 'complex' objects with values that are multiple of 100 0f909264-3f67-4448-8c4b-7e36b3170213
Error: Cannot convert 'complex' objects with values that are multiple of 100 6a547b6d-0d19-4a80-b5f1-bea929257451
Error: Cannot convert 'complex' objects with values that are multiple of 100 6b447619-0b02-45fd-acd5-dcf65ff4167d
Error: Cannot convert 'complex' objects with values that are multiple of 100 28d721a2-0ec6-46fa-9bcf-08be65342324
Error: Cannot convert 'complex' objects with values that are multiple of 100 7f80381e-753f-45ce-91dc-1b52e303bd24
Error: Cannot convert 'complex' objects with values that are multiple of 100 49977d85-73be-4326-a82c-6cfc8b063f03
Error: Cannot convert 'complex' objects with values that are multiple of 100 0a95b8bf-b563-467f-af9b-b6df19cd94bf
Error: Cannot convert 'complex' objects with values that are multiple of 100 6965d38d-6614-4f0d-9c3e-51cd1f97231a
Error: Cannot convert 'complex' objects with values that are multiple of 100 8f332c2a-d443-4082-b2bd-4a8801cf3953
Error: Cannot convert 'complex' objects with values that are multiple of 100 792615e1-18a6-4853-bb26-ba086a59037e
Error: Cannot convert 'complex' objects with values that are multiple of 100 5ad10f39-c330-4c92-b76f-c7fcd8d8d378
Error: Cannot convert 'complex' objects with values that are multiple of 100 d988b3e6-33bd-4825-bf1c-4446fa2bd526
Error: Cannot convert 'complex' objects with values that are multiple of 100 3cf1ef60-a850-4210-b7bb-b8fe1ea50221
Error: Cannot convert 'complex' objects with values that are multiple of 100 23e8c863-493c-476e-8bbe-571fef120ccb
Error: Cannot convert 'complex' objects with values that are multiple of 100 adbb86d6-02df-4f67-bf7e-e3b3dbfd3f8c
Error: Cannot convert 'complex' objects with values that are multiple of 100 7ffe6b2a-0b27-42f3-8aad-499c1e648a68
Error: Cannot convert 'complex' objects with values that are multiple of 100 4ff7e22f-a480-4f3a-9969-0f1bf060b85c
Error: Cannot convert 'complex' objects with values that are multiple of 100 8a207e2d-af6b-4c4f-adca-09108394f793
Error: Cannot convert 'complex' objects with values that are multiple of 100 250a845e-f8c4-4c07-bb9d-3985b8a87579
Error: Cannot convert 'complex' objects with values that are multiple of 100 0d67c49d-c601-4b21-bfe8-a69638354094
Error: Cannot convert 'complex' objects with values that are multiple of 100 cac1f217-e635-431d-b578-df934653d211
Error: Cannot convert 'complex' objects with values that are multiple of 100 4c8dd841-8912-433e-a32d-1f835496175b
Error: Cannot convert 'complex' objects with values that are multiple of 100 a4136099-777c-4162-9cac-c707a6ef2f1f
Error: Cannot convert 'complex' objects with values that are multiple of 100 750ce3fc-28f0-427b-aec0-904b66a9e820
Error: Cannot convert 'complex' objects with values that are multiple of 100 a251c40f-2327-46f2-8620-1beaafd8e117
Error: Cannot convert 'complex' objects with values that are multiple of 100 1ebd1cb2-3743-45f2-b8e8-e97b28ede1a0
Error: Cannot convert 'complex' objects with values that are multiple of 100 948460aa-19f3-4e46-a18b-c5894be04d03
Error: Cannot convert 'complex' objects with values that are multiple of 100 e4ab245e-0a23-4443-aef2-055c2379b99e
Error: Cannot convert 'complex' objects with values that are multiple of 100 c4715be2-4698-45c6-b02b-4b3109deceb3
Error: Cannot convert 'complex' objects with values that are multiple of 100 db7d6d3c-0a8d-4b1a-a80a-6353cf19f721
Error: Cannot convert 'complex' objects with values that are multiple of 100 22dd3886-dc11-41a8-96b0-90bd5869e05c
Error: Cannot convert 'complex' objects with values that are multiple of 100 b366b1fb-b995-4eb0-810a-cb06beca55af
Error: Cannot convert 'complex' objects with values that are multiple of 100 65b788da-921f-40aa-8d75-fe8d7a3e591f
Error: Cannot convert 'complex' objects with values that are multiple of 100 fffa3aa6-844c-449c-90df-db26aaa7a60f
Error: Cannot convert 'complex' objects with values that are multiple of 100 17358f0a-c161-45e5-ad20-7a2b4456c93c
Error: Cannot convert 'complex' objects with values that are multiple of 100 b2773c23-7d52-4913-9761-59775f42c7e9
Error: Cannot convert 'complex' objects with values that are multiple of 100 b04bea35-5c3c-4305-8dbc-2c7f0377da28
Error: Cannot convert 'complex' objects with values that are multiple of 100 764f6f97-9475-45fd-8424-5454fa51df4b
Error: Cannot convert 'complex' objects with values that are multiple of 100 66278ca9-2a71-452d-bfd8-ed3b442a8c66
Error: Cannot convert 'complex' objects with values that are multiple of 100 3f0acc8a-f3f6-43c1-b494-0c1058ad2f8a
Error: Cannot convert 'complex' objects with values that are multiple of 100 7c4c9835-2733-4427-a161-92bb497a506b
Error: Cannot convert 'complex' objects with values that are multiple of 100 bc975974-8a79-48f2-a576-1519c06cebdc
Error: Cannot convert 'complex' objects with values that are multiple of 100 f7511369-2372-4290-af7b-cd7ad67831ea
Error: Cannot convert 'complex' objects with values that are multiple of 100 82c41f11-810d-448f-889d-baf4c576c0e5
Error: Cannot convert 'complex' objects with values that are multiple of 100 cd7fb238-d622-4b3e-9cc8-31e49f990288
Error: Cannot convert 'complex' objects with values that are multiple of 100 a82f8da1-3875-441b-a4d5-722758176b85
Error: Cannot convert 'complex' objects with values that are multiple of 100 c2cd2a70-7680-4365-8cbe-1cee8a6988c1
Error: Cannot convert 'complex' objects with values that are multiple of 100 adfed55d-4863-4ae8-a469-d7d7accbec74
Error: Cannot convert 'complex' objects with values that are multiple of 100 bcb7d634-77aa-4616-8f06-ffa38789c95e
Error: Cannot convert 'complex' objects with values that are multiple of 100 70eb8bfe-28e2-4c5a-abbd-24d8dc339a5e
Error: Cannot convert 'complex' objects with values that are multiple of 100 2caba943-411e-41e9-9a80-0c7417070c26
Error: Cannot convert 'complex' objects with values that are multiple of 100 2f97b472-37c7-497f-b820-597736d32721
Error: Cannot convert 'complex' objects with values that are multiple of 100 2d880eb1-910c-4264-be51-6933b9c42123
Error: Cannot convert 'complex' objects with values that are multiple of 100 09139494-4e2c-4b70-ae23-bf35c27fac71
Error: Cannot convert 'complex' objects with values that are multiple of 100 9fbdbd0f-0dcc-4f7f-82e3-6ff078a0af8f
Error: Cannot convert 'complex' objects with values that are multiple of 100 fbc57d3e-5d79-436d-882d-d2eb6841b222
Error: Cannot convert 'complex' objects with values that are multiple of 100 617c6fd2-30ed-453d-8412-69b88e8d6a1c
Error: Cannot convert 'complex' objects with values that are multiple of 100 4d705701-0992-4b6e-bbb7-5ada757497b7
Error: Cannot convert 'complex' objects with values that are multiple of 100 941d2022-ddac-47a2-a1fa-dd0e9f4c1e3b
Error: Cannot convert 'complex' objects with values that are multiple of 100 1da87b1b-cf90-4974-8f3f-208422c6d53a
Error: Cannot convert 'complex' objects with values that are multiple of 100 56a0d4a4-3302-43e6-b0fc-d1643584affc
Error: Cannot convert 'complex' objects with values that are multiple of 100 2cad707a-3e4a-4195-9f77-fe4da18525a9
Error: Cannot convert 'complex' objects with values that are multiple of 100 2f2f7dda-f498-4994-802d-0455e42fa762
Error: Cannot convert 'complex' objects with values that are multiple of 100 855f59de-b63c-4fb7-93f7-5de0700aa01f
Error: Cannot convert 'complex' objects with values that are multiple of 100 4d4a694c-4978-4005-9406-c8b837f8c3b6
Error: Cannot convert 'complex' objects with values that are multiple of 100 031d2c9b-4d8e-4990-a835-268b11152cb7
Error: Cannot convert 'complex' objects with values that are multiple of 100 63ccf4e1-3889-4321-a303-6e811150305c
Error: Cannot convert 'complex' objects with values that are multiple of 100 dc9421c1-ad5c-4fbf-b39c-a508b530aa89
Error: Cannot convert 'complex' objects with values that are multiple of 100 f132f27b-1ce6-470c-a563-149a42e97628
Error: Cannot convert 'complex' objects with values that are multiple of 100 2e8e1338-aae4-4f2f-8468-7b70fb082f9e
Error: Cannot convert 'complex' objects with values that are multiple of 100 a88372ec-e57b-45a4-98f1-1318a9b19980
Error: Cannot convert 'complex' objects with values that are multiple of 100 0b4d0f30-96aa-4618-9107-8765873e37cd
Error: Cannot convert 'complex' objects with values that are multiple of 100 bf578005-a0d8-4053-9be6-c26d9811425c
Error: Cannot convert 'complex' objects with values that are multiple of 100 df80d98a-218d-4553-9870-18165351089d
Error: Cannot convert 'complex' objects with values that are multiple of 100 d9b501ee-e656-46bb-8161-8e40c0d0a864
Error: Cannot convert 'complex' objects with values that are multiple of 100 baee1ae1-0656-4671-94a8-97bb8359b53a
Error: Cannot convert 'complex' objects with values that are multiple of 100 ebd3a8cc-55b6-4d98-9ad5-1acf2fb0ecb8
Error: Cannot convert 'complex' objects with values that are multiple of 100 44dc45d3-eded-479a-9817-c2e571dc500d
Error: Cannot convert 'complex' objects with values that are multiple of 100 e357a77c-8adf-40ff-a62e-dc993023643f
Error: Cannot convert 'complex' objects with values that are multiple of 100 0164e955-bc7b-42b8-95b5-6030f112c73b
Error: Cannot convert 'complex' objects with values that are multiple of 100 dde6d486-f004-4ed4-8244-907c22bf7486
Error: Cannot convert 'complex' objects with values that are multiple of 100 f09daf45-316a-412c-b7f1-ddc9c905ea5e
Error: Cannot convert 'complex' objects with values that are multiple of 100 a24b53c2-8686-4796-8e13-17355d388183
Error: Cannot convert 'complex' objects with values that are multiple of 100 b6bbe8ae-ac3c-47be-b9b0-9addb21f1f28
Error: Cannot convert 'complex' objects with values that are multiple of 100 3096421f-ae90-4446-bca6-483069a7d39f
Error: Cannot convert 'complex' objects with values that are multiple of 100 2d3e4f20-9597-4899-8588-215695e49383
Error: Cannot convert 'complex' objects with values that are multiple of 100 1e925623-346f-46db-baa3-cad246b2b11d
Error: Cannot convert 'complex' objects with values that are multiple of 100 bd3d5a34-ab1f-444c-8cb9-b1ef6c0c9032
Error: Cannot convert 'complex' objects with values that are multiple of 100 23c8355f-8816-4302-9cb6-e495e41b6d2e
Error: Cannot convert 'complex' objects with values that are multiple of 100 54894296-1436-457b-932c-3c0744e38783
Error: Cannot convert 'complex' objects with values that are multiple of 100 074c7a30-324f-487a-afa7-258279c99ca6
Error: Cannot convert 'complex' objects with values that are multiple of 100 86048354-a13f-474e-9ead-0b0999536df5
Error: Cannot convert 'complex' objects with values that are multiple of 100 74e88bc1-d178-44db-936a-0977fdb28eb8
Error: Cannot convert 'complex' objects with values that are multiple of 100 c5f596fc-d748-4858-9745-570a8cb2448a
Error: Cannot convert 'complex' objects with values that are multiple of 100 be634794-0077-4ac9-b25a-a0af6fcd375c
Error: Cannot convert 'complex' objects with values that are multiple of 100 488d5d0e-528d-447f-b38c-fb20a932ce7f
Error: Cannot convert 'complex' objects with values that are multiple of 100 75cfab96-0449-40e6-8778-8b5de495dcce
Error: Cannot convert 'complex' objects with values that are multiple of 100 68e41ab6-4f47-4933-8911-86099f120c66
Error: Cannot convert 'complex' objects with values that are multiple of 100 e3239dac-add9-4477-a11a-c8c123373733
Error: Cannot convert 'complex' objects with values that are multiple of 100 057d64b6-8a90-48bc-b7fa-04645267b3a3
Error: Cannot convert 'complex' objects with values that are multiple of 100 c8514d0d-b7b5-4782-9c35-6643d6f732b9
Error: Cannot convert 'complex' objects with values that are multiple of 100 c2fb8bf7-8d99-4184-842c-8a9851789883
Error: Cannot convert 'complex' objects with values that are multiple of 100 f03447f7-3001-403c-a734-72425e5cee16
Error: Cannot convert 'complex' objects with values that are multiple of 100 6aaa87d5-6677-4424-aa31-4dbe612a56ad
Error: Cannot convert 'complex' objects with values that are multiple of 100 a48ba736-f710-40c8-b16f-4bc2b292aa88
Error: Cannot convert 'complex' objects with values that are multiple of 100 a57dc495-85d8-4e77-866a-aa94d04f9e00
Error: Cannot convert 'complex' objects with values that are multiple of 100 e22fdee4-87e2-4bb7-a42d-fc9d7dcbfd47
Error: Cannot convert 'complex' objects with values that are multiple of 100 d5f43bf2-ec06-46c9-9958-e08f5ed69942
Error: Cannot convert 'complex' objects with values that are multiple of 100 2e05366a-ca12-4ac8-ad95-e08c291fae5f
Error: Cannot convert 'complex' objects with values that are multiple of 100 2922a4df-33c6-4eb4-8046-29e4924faa72
Error: Cannot convert 'complex' objects with values that are multiple of 100 e8e5e334-d316-46fc-8d6a-367719055a72
Error: Cannot convert 'complex' objects with values that are multiple of 100 88db8e61-5311-49c3-8e94-0f2f20b15c76
Error: Cannot convert 'complex' objects with values that are multiple of 100 4da4e59e-2df1-4aea-b409-22c12444d9d9
Error: Cannot convert 'complex' objects with values that are multiple of 100 20369ba4-35b1-40bf-a3c2-a6b5b550b98f
Error: Cannot convert 'complex' objects with values that are multiple of 100 f3d94549-bdc4-4c4e-968f-392917479f3f
Error: Cannot convert 'complex' objects with values that are multiple of 100 f67b0815-185e-4cbf-aaee-ce41a1590b13
Error: Cannot convert 'complex' objects with values that are multiple of 100 e07c3be8-5e7a-491c-9476-b12d9ba20cae
Error: Cannot convert 'complex' objects with values that are multiple of 100 c9006788-cc7b-438c-aec2-6efc2dceea2e
Error: Cannot convert 'complex' objects with values that are multiple of 100 2110c607-9891-4339-a97d-413e5fede28a
Error: Cannot convert 'complex' objects with values that are multiple of 100 f9698018-115e-4242-8b2e-366ca5fb76c8
Error: Cannot convert 'complex' objects with values that are multiple of 100 2b33bb00-d5da-4b70-813b-0c858b5906ad
Error: Cannot convert 'complex' objects with values that are multiple of 100 b9f5a9e9-af5f-4734-b26a-d62dd5c573bb
Error: Cannot convert 'complex' objects with values that are multiple of 100 c5aaa1f0-ec44-413c-9f05-fc7c4635b314
Error: Cannot convert 'complex' objects with values that are multiple of 100 446e3be8-98f1-40e5-9725-f1819be4a656
Error: Cannot convert 'complex' objects with values that are multiple of 100 e4b89f9d-17aa-43fa-b43f-cc20c20a9417
Error: Cannot convert 'complex' objects with values that are multiple of 100 75ee547c-f007-43f3-bec4-faa3d493bb99
Error: Cannot convert 'complex' objects with values that are multiple of 100 8ce01347-17bb-4d97-b675-729f1efcadad
Error: Cannot convert 'complex' objects with values that are multiple of 100 5310c7f9-c18f-44dd-b348-aa8a2a22f7fe
Error: Cannot convert 'complex' objects with values that are multiple of 100 5895db46-302f-4f5c-a602-4556d911c9c0
Error: Cannot convert 'complex' objects with values that are multiple of 100 37039f4d-768c-441b-a2fa-6e1eb7865aa7
Error: Cannot convert 'complex' objects with values that are multiple of 100 b885217e-6bb1-452c-8b2b-47370617edcb
Error: Cannot convert 'complex' objects with values that are multiple of 100 75021416-740d-43f8-8136-3af81b3f4409
Error: Cannot convert 'complex' objects with values that are multiple of 100 eb886e03-2415-479c-86de-1905ee9959a6
Error: Cannot convert 'complex' objects with values that are multiple of 100 2dd12139-4f6c-4c51-a845-77c04189d629
Error: Cannot convert 'complex' objects with values that are multiple of 100 8ffb7e81-374c-4e64-8e03-4e1208631183
Error: Cannot convert 'complex' objects with values that are multiple of 100 03a5df77-b0de-4459-9565-ec5168a6b278
Error: Cannot convert 'complex' objects with values that are multiple of 100 78a30611-d7ae-4a2b-9ae4-b5d6444f204e
Error: Cannot convert 'complex' objects with values that are multiple of 100 5b4eaae7-4e62-4d36-bc21-6327ff3784e7
Error: Cannot convert 'complex' objects with values that are multiple of 100 956717ab-3c80-47e9-a282-65cddb6f4030
Error: Cannot convert 'complex' objects with values that are multiple of 100 fe7a5481-39f8-45fc-a678-ecf74fe70f91
Error: Cannot convert 'complex' objects with values that are multiple of 100 f1cd3546-d75c-44f5-b3f9-5d72553076dd
Error: Cannot convert 'complex' objects with values that are multiple of 100 7cba77c5-71c6-49f1-869f-e47711c81b78
Error: Cannot convert 'complex' objects with values that are multiple of 100 86e29a66-b5c7-4198-b5fc-495ae4584bd4
Error: Cannot convert 'complex' objects with values that are multiple of 100 ab733c82-c8d0-4d42-948d-eeaa04f76aed
Error: Cannot convert 'complex' objects with values that are multiple of 100 358d65d7-530e-4771-96a0-b4bf5a89f32e
Error: Cannot convert 'complex' objects with values that are multiple of 100 832954a4-be97-4f6a-b1cf-58947357643f
Error: Cannot convert 'complex' objects with values that are multiple of 100 567bdfbf-6ce3-404f-8d43-fd8f8ddc7efe
Error: Cannot convert 'complex' objects with values that are multiple of 100 d1defaa7-6892-43fc-b240-aaf156cd94fd
Error: Cannot convert 'complex' objects with values that are multiple of 100 7999835e-f577-4334-8da3-85b33fd4ef62
Error: Cannot convert 'complex' objects with values that are multiple of 100 f219ea6d-7f4b-45dc-8aa0-6b728a49c4b2
Error: Cannot convert 'complex' objects with values that are multiple of 100 f9b9feb1-dd16-439b-a444-2b308171e351
Error: Cannot convert 'complex' objects with values that are multiple of 100 fa453dea-5957-468c-ac16-3d05e0048fc5
Error: Cannot convert 'complex' objects with values that are multiple of 100 01b36bd1-c2fc-4d68-9ce4-09514eeee6a2
Error: Cannot convert 'complex' objects with values that are multiple of 100 e1d9231d-9df0-4a30-b2e5-c65a26e1137f
Error: Cannot convert 'complex' objects with values that are multiple of 100 3ffc162e-5b89-4001-843c-aa61aa7c65aa
Error: Cannot convert 'complex' objects with values that are multiple of 100 b8b1be10-a0d5-4952-abf4-45c51d138e62
Error: Cannot convert 'complex' objects with values that are multiple of 100 f346cd51-1018-45d1-b8c0-53061c678d07
Error: Cannot convert 'complex' objects with values that are multiple of 100 dbe6b20b-36e5-41ec-a369-8c92522c21ea
Error: Cannot convert 'complex' objects with values that are multiple of 100 bb5f21e8-7b65-4c4e-a17c-222f047e5dcf
Error: Cannot convert 'complex' objects with values that are multiple of 100 a0075350-72c2-4da6-be55-2e05ca7bb94b
Error: Cannot convert 'complex' objects with values that are multiple of 100 2730998f-a869-4623-9b89-778872d16f69
Error: Cannot convert 'complex' objects with values that are multiple of 100 a3e0916c-8f54-4468-8ad2-aed43996a860
Error: Cannot convert 'complex' objects with values that are multiple of 100 811004ec-c20d-4e13-a994-c3038392e259
Error: Cannot convert 'complex' objects with values that are multiple of 100 aca6ae67-3996-4d49-98dd-155b3589b672
Error: Cannot convert 'complex' objects with values that are multiple of 100 c6a0999f-83b5-42e6-80b6-4973c6799eba
Error: Cannot convert 'complex' objects with values that are multiple of 100 91cd6061-5e31-4d5f-a5de-a3f83d4a219d
Error: Cannot convert 'complex' objects with values that are multiple of 100 d9811e90-b526-436e-9465-21b63163d0f8
Error: Cannot convert 'complex' objects with values that are multiple of 100 c15c1f02-2537-4c94-b3f1-51d7d5495e97
Error: Cannot convert 'complex' objects with values that are multiple of 100 d5debf75-e8ca-4abc-9206-31b9e6a39b34
Error: Cannot convert 'complex' objects with values that are multiple of 100 08149288-b20a-48b1-b20b-b3435fd81c23
Error: Cannot convert 'complex' objects with values that are multiple of 100 98ccade0-55ae-452c-9dbe-b2bbb0137c4f
Error: Cannot convert 'complex' objects with values that are multiple of 100 7cdd5ff7-0ae0-42fd-bda8-19e7a7871b17
Error: Cannot convert 'complex' objects with values that are multiple of 100 20e0c753-6535-493e-b172-fd3a22c600ce
Error: Cannot convert 'complex' objects with values that are multiple of 100 6a41a295-2414-4ce5-89ed-750acfb6d7df
Error: Cannot convert 'complex' objects with values that are multiple of 100 38eb7ed3-f6df-4ecc-987e-5ff1ded3be0e
Error: Cannot convert 'complex' objects with values that are multiple of 100 c3dc24e2-bcfa-4b81-8a4b-4f0a2a015052
Error: Cannot convert 'complex' objects with values that are multiple of 100 e08dd05f-fd01-4f38-9bd8-9fc0f6e81fb3
Error: Cannot convert 'complex' objects with values that are multiple of 100 21f8dffe-c415-49e7-87ea-8a1a71cfc0b0
Error: Cannot convert 'complex' objects with values that are multiple of 100 39a5c4a7-13aa-46f3-9099-be7805b73b56
Error: Cannot convert 'complex' objects with values that are multiple of 100 e52db074-c374-4252-aa70-1cf6c5394a99
Error: Cannot convert 'complex' objects with values that are multiple of 100 b5fb653f-fe5e-46c5-8357-269c16994ea4
Error: Cannot convert 'complex' objects with values that are multiple of 100 cd4bc600-b72b-4914-9967-9691e997f9c5
Error: Cannot convert 'complex' objects with values that are multiple of 100 208f5351-1891-4b4d-98fe-8a9ef0a4a06a
Error: Cannot convert 'complex' objects with values that are multiple of 100 8070d42a-f6a0-4f6c-bf52-e99f41592c60
Error: Cannot convert 'complex' objects with values that are multiple of 100 bc42c4ab-0b5c-4e60-bf38-6dc4ab5fc096
Error: Cannot convert 'complex' objects with values that are multiple of 100 bbb8f8cc-bfd4-42c2-a803-ceb0ca98cbd0
Error: Cannot convert 'complex' objects with values that are multiple of 100 37dcf9ee-c3f3-4dfe-b478-229707aaaf4f
Error: Cannot convert 'complex' objects with values that are multiple of 100 0fd32ad7-df77-4af7-8f1c-77797c3cf155
Error: Cannot convert 'complex' objects with values that are multiple of 100 9ae9e609-3f28-496e-9f3a-008cbcb0b989
Error: Cannot convert 'complex' objects with values that are multiple of 100 8eb3a7ac-e901-4f3e-810b-56a130c6d469
Error: Cannot convert 'complex' objects with values that are multiple of 100 aabf8d04-662d-42c1-b804-eb2e50ae4800
Error: Cannot convert 'complex' objects with values that are multiple of 100 c5ffd23a-8918-48bb-968f-956983243014
Error: Cannot convert 'complex' objects with values that are multiple of 100 dd2e5117-4c7a-437b-888f-895cf9638be9
Error: Cannot convert 'complex' objects with values that are multiple of 100 6fa1b53b-13d9-4575-a0e0-0791dbe030d2
Error: Cannot convert 'complex' objects with values that are multiple of 100 da2a2b20-41a8-4a05-a874-a06736cda381
Error: Cannot convert 'complex' objects with values that are multiple of 100 b65944af-d034-44c7-8b18-3ebe68b4468e
Error: Cannot convert 'complex' objects with values that are multiple of 100 9299e466-9b99-4601-baa2-595fc4a706e8
Error: Cannot convert 'complex' objects with values that are multiple of 100 8fe1ac93-246a-47ce-a1e9-c561f908899d
Error: Cannot convert 'complex' objects with values that are multiple of 100 4e80230b-0430-4bc6-81d0-ef89a17493a9
Error: Cannot convert 'complex' objects with values that are multiple of 100 d221509e-66ba-44ac-9b2d-4998ede772da
Error: Cannot convert 'complex' objects with values that are multiple of 100 4ee80f91-4b4f-4b2b-8378-27a46ce9441c
Error: Cannot convert 'complex' objects with values that are multiple of 100 62165917-0c1a-43e6-b9f8-c314534b6c56
Error: Cannot convert 'complex' objects with values that are multiple of 100 c9024b66-d083-484d-8979-233a480e40e3
Error: Cannot convert 'complex' objects with values that are multiple of 100 f2621e26-db3f-4310-a546-c0f1c0e8edbb
Error: Cannot convert 'complex' objects with values that are multiple of 100 1feb8331-edba-4daf-82fa-f3b398e7563d
Error: Cannot convert 'complex' objects with values that are multiple of 100 68db8e24-e7d3-4874-9566-d1b7acc9e4c6
Error: Cannot convert 'complex' objects with values that are multiple of 100 ec45f68e-90a7-4ee2-b98e-b0d60c2965b7
Error: Cannot convert 'complex' objects with values that are multiple of 100 a3490c70-df96-42af-bba0-88a6a7ca2056
Error: Cannot convert 'complex' objects with values that are multiple of 100 bf81f48c-59f9-491a-a8f1-2764793e9520
Error: Cannot convert 'complex' objects with values that are multiple of 100 557db728-0992-423c-8a9e-52c8da0c14ad
Error: Cannot convert 'complex' objects with values that are multiple of 100 43fd27fc-ae90-43b0-849f-29d93b94704c
Error: Cannot convert 'complex' objects with values that are multiple of 100 408c2e44-1872-4f5a-b5bb-e47c33973f9b
Error: Cannot convert 'complex' objects with values that are multiple of 100 f6f1e7f0-fa32-4fe3-b2f6-53aaa2327ec4
Error: Cannot convert 'complex' objects with values that are multiple of 100 c6a0a1bd-f602-40af-be00-0e06fbd14a06
Error: Cannot convert 'complex' objects with values that are multiple of 100 ae9ff779-0fa2-4080-9185-d557d5cfd0ca
Error: Cannot convert 'complex' objects with values that are multiple of 100 6fdba719-5fa7-43b6-a9d5-04dd17a92f14
Error: Cannot convert 'complex' objects with values that are multiple of 100 107f299e-ce8d-427e-bb95-a8a7481d93fd
Error: Cannot convert 'complex' objects with values that are multiple of 100 d19d83a2-cb89-423b-9564-b0b2c3a49493
Error: Cannot convert 'complex' objects with values that are multiple of 100 afa5ae22-6ea3-4622-ae35-15402aab00cf
Error: Cannot convert 'complex' objects with values that are multiple of 100 958a9942-9390-449e-b63d-267419a137e2
Error: Cannot convert 'complex' objects with values that are multiple of 100 b2d0f93d-1ccc-4203-9387-476626288bfe
Error: Cannot convert 'complex' objects with values that are multiple of 100 e4774580-6d9c-4bb1-9963-758c17941ec6
Error: Cannot convert 'complex' objects with values that are multiple of 100 50746262-88db-4fcd-a49c-d915b2604614
Error: Cannot convert 'complex' objects with values that are multiple of 100 8b2a7eca-d3d3-491b-a64e-a8d574b8f42c
Error: Cannot convert 'complex' objects with values that are multiple of 100 4993426d-0998-4772-bb55-9c01111c5bdd
Error: Cannot convert 'complex' objects with values that are multiple of 100 fdd0b447-41fb-4f92-9c16-0217fe80822d
Error: Cannot convert 'complex' objects with values that are multiple of 100 12a687c7-4688-4770-82ab-12599deb5909
Error: Cannot convert 'complex' objects with values that are multiple of 100 537b8eb8-bd1e-4aa9-85e7-fd3cce785940
Error: Cannot convert 'complex' objects with values that are multiple of 100 7bb95e91-99f9-4ea1-b570-a269b0d04646
Error: Cannot convert 'complex' objects with values that are multiple of 100 83266df9-8858-4e0b-bc0c-dcd0d8621961
Error: Cannot convert 'complex' objects with values that are multiple of 100 4e2af2d4-4c02-4149-85fc-2371c34c8d02
Error: Cannot convert 'complex' objects with values that are multiple of 100 6dbdaf69-0c44-4077-86ef-6c6676f5eaa3
Error: Cannot convert 'complex' objects with values that are multiple of 100 1e901e76-59bf-486f-a2c9-753ad06ff420
Error: Cannot convert 'complex' objects with values that are multiple of 100 939363c4-1a98-42ce-972f-f3cd3ed8848e
Error: Cannot convert 'complex' objects with values that are multiple of 100 6a30ba99-3676-46bf-bc87-ce8c208df1a9
Error: Cannot convert 'complex' objects with values that are multiple of 100 9b49d344-7deb-42b2-aa46-5fdbe3b15f14
Error: Cannot convert 'complex' objects with values that are multiple of 100 ab7e4fc8-020d-43f0-826e-007fe2c8931e
Error: Cannot convert 'complex' objects with values that are multiple of 100 0327c81e-d2c3-4d02-aafd-35d1b766082c
Error: Cannot convert 'complex' objects with values that are multiple of 100 fa5c31b6-2f70-4e9a-8049-9e3ae1bcb62d
Error: Cannot convert 'complex' objects with values that are multiple of 100 27b660bf-32f0-4e50-8d7e-f82597f67bbe
Error: Cannot convert 'complex' objects with values that are multiple of 100 bca05bbc-8300-4f18-9030-322960a6bc40
Error: Cannot convert 'complex' objects with values that are multiple of 100 b707af19-6309-4923-a72c-00a7e918adf2
Error: Cannot convert 'complex' objects with values that are multiple of 100 d0413269-7587-4649-9393-c62b5017c2c0
Error: Cannot convert 'complex' objects with values that are multiple of 100 bd87fa2d-59ad-45c7-aa97-3ba1b4127daa
Error: Cannot convert 'complex' objects with values that are multiple of 100 74f10be4-1388-4229-bf03-c86422cb097e
Error: Cannot convert 'complex' objects with values that are multiple of 100 4b3d2683-99a7-4e7e-acca-d446a0a08b20
Error: Cannot convert 'complex' objects with values that are multiple of 100 a2c41ff8-d89b-488c-a5a5-95036446afb3
Error: Cannot convert 'complex' objects with values that are multiple of 100 fffa3221-7aa1-4c28-a507-320c16182a4c
Error: Cannot convert 'complex' objects with values that are multiple of 100 3bd4ba00-dc2f-44bc-b50d-a9bf830aaa13
Error: Cannot convert 'complex' objects with values that are multiple of 100 9eb8e1a1-8883-4ae1-bfc8-a70eeb70578d
Error: Cannot convert 'complex' objects with values that are multiple of 100 1fdcc486-bc06-4c45-b89e-73d310ef0b39
Error: Cannot convert 'complex' objects with values that are multiple of 100 3ba7a46b-58a9-487c-975f-f2a1cb38e08b
Error: Cannot convert 'complex' objects with values that are multiple of 100 512a2b3f-3fea-40f8-93be-48b6c7a6a479
Error: Cannot convert 'complex' objects with values that are multiple of 100 314688c7-7a3c-4a80-b484-011ea4d817c4
Error: Cannot convert 'complex' objects with values that are multiple of 100 4e27d701-369f-49fb-8a39-f2ffd04ab1db
Error: Cannot convert 'complex' objects with values that are multiple of 100 be88a16d-a664-4008-ba82-041caefa889f
Error: Cannot convert 'complex' objects with values that are multiple of 100 57274171-71b0-4832-872a-4e17270dab82
Error: Cannot convert 'complex' objects with values that are multiple of 100 4771d442-a45a-4d08-846c-1bfdaec2ccbc
Error: Cannot convert 'complex' objects with values that are multiple of 100 86417be8-150e-48d6-ae77-4f460b5199f2
Error: Cannot convert 'complex' objects with values that are multiple of 100 4e275687-21d6-41b1-9d7a-4d421f21191b
Error: Cannot convert 'complex' objects with values that are multiple of 100 f61e487f-2293-43fb-ad4d-d0c0eaaa26ec
Error: Cannot convert 'complex' objects with values that are multiple of 100 cb1e1ea3-08e3-4150-af74-a6a32de520d2
Error: Cannot convert 'complex' objects with values that are multiple of 100 4bcc5f74-41bf-4042-a057-6a3a6ff8268b
Error: Cannot convert 'complex' objects with values that are multiple of 100 2a33da09-ad45-4d51-bd33-bf8b82258e55
Error: Cannot convert 'complex' objects with values that are multiple of 100 9c7b56df-5205-4fd4-8ce5-a680b17fc610
Error: Cannot convert 'complex' objects with values that are multiple of 100 46309376-56ce-46c6-8048-153883143479
Error: Cannot convert 'complex' objects with values that are multiple of 100 2844d47c-3f9b-479d-8aab-c92e4546bcc9
Error: Cannot convert 'complex' objects with values that are multiple of 100 2e6f6f16-40cd-4669-905c-4a738dc0df55
Error: Cannot convert 'complex' objects with values that are multiple of 100 b3bd09e9-5d5e-49ca-bc8f-54d97304b743
Error: Cannot convert 'complex' objects with values that are multiple of 100 6c1fe282-19fe-4254-a9b0-eaec9e637372
Error: Cannot convert 'complex' objects with values that are multiple of 100 47851556-f15d-4b02-8737-5432bb3f2339
Error: Cannot convert 'complex' objects with values that are multiple of 100 f88405bf-983d-402e-8e75-e07da4c340cc
Error: Cannot convert 'complex' objects with values that are multiple of 100 063ae63f-8b80-40b2-ac25-0db9604157e1
Error: Cannot convert 'complex' objects with values that are multiple of 100 b77402d3-6113-41a1-83f8-9a6e51871583
Error: Cannot convert 'complex' objects with values that are multiple of 100 4cf11d26-d48e-4367-b510-0f26803f09b7
Error: Cannot convert 'complex' objects with values that are multiple of 100 77694b8c-e48e-49d7-a9a0-3a247960ba12
Error: Cannot convert 'complex' objects with values that are multiple of 100 5e10030e-2128-4e85-a46e-d677f1e7a95c
Error: Cannot convert 'complex' objects with values that are multiple of 100 b8f29a6d-1d0e-4faa-9f62-b0524e31e631
Error: Cannot convert 'complex' objects with values that are multiple of 100 77df671c-d14e-4426-a6c2-018b6e3a3261
Error: Cannot convert 'complex' objects with values that are multiple of 100 91ba18c4-d404-46a9-8049-0fc058ac1c36
Error: Cannot convert 'complex' objects with values that are multiple of 100 75eba05b-e62a-4028-8076-3a51109d4039
Error: Cannot convert 'complex' objects with values that are multiple of 100 62337e60-fe1a-466e-9a1f-5e542847e254
Error: Cannot convert 'complex' objects with values that are multiple of 100 ca760932-008e-493d-a24c-7944b306fa08
Error: Cannot convert 'complex' objects with values that are multiple of 100 d4c2f9e0-844f-4409-b649-cba43f9120b7
Error: Cannot convert 'complex' objects with values that are multiple of 100 8a7e9be5-f4c6-4c3c-aceb-cf3f5c284d3e
Error: Cannot convert 'complex' objects with values that are multiple of 100 1ef37bdf-fbfd-4af4-8822-e63eeadb8cec
Error: Cannot convert 'complex' objects with values that are multiple of 100 5908bbef-2706-4023-9074-40a1b58e64c5
Error: Cannot convert 'complex' objects with values that are multiple of 100 9e22b922-4ebe-40b6-b5a5-89e850662e61
Error: Cannot convert 'complex' objects with values that are multiple of 100 d3cae61b-cb66-4098-ab1d-dfd1a6583a6b
Error: Cannot convert 'complex' objects with values that are multiple of 100 69551a2a-6a1e-49c8-a0b3-9cfdacf34834
Error: Cannot convert 'complex' objects with values that are multiple of 100 0811e09f-6068-4ff8-9859-3fdb4d714927
Error: Cannot convert 'complex' objects with values that are multiple of 100 93b46c03-2c4f-458d-b402-7e986388790a
Error: Cannot convert 'complex' objects with values that are multiple of 100 b59266f1-d8a4-4a6a-9d71-b37e04fe2ceb
Error: Cannot convert 'complex' objects with values that are multiple of 100 317fc0bf-fad4-459a-8cff-59ea00331f51
Error: Cannot convert 'complex' objects with values that are multiple of 100 a90b6a7e-edb5-418f-998e-c251cd4f3c97
Error: Cannot convert 'complex' objects with values that are multiple of 100 7d878dd8-ec5c-4e77-b5bd-879be42e458a
Error: Cannot convert 'complex' objects with values that are multiple of 100 5a6fcf02-d651-4b23-877c-cac96af78c1a
Error: Cannot convert 'complex' objects with values that are multiple of 100 8a8926c1-50ef-4d36-af6b-1e8909c25e48
Error: Cannot convert 'complex' objects with values that are multiple of 100 a25b4108-485c-4913-8efa-d62ba67ab05b
Error: Cannot convert 'complex' objects with values that are multiple of 100 eb81d6e8-177f-41fb-8924-7ba46eea3148
Error: Cannot convert 'complex' objects with values that are multiple of 100 e292607a-e362-4446-83e7-8dfce821a3fb
Error: Cannot convert 'complex' objects with values that are multiple of 100 e1c11b81-0a5c-4984-91f1-d2bcee8a38a1
Error: Cannot convert 'complex' objects with values that are multiple of 100 82f73484-9335-4b46-82b5-45d4a74f9f0b
Error: Cannot convert 'complex' objects with values that are multiple of 100 46a9a7ae-275c-4532-b2e3-b4bcfa8cd2e0
Error: Cannot convert 'complex' objects with values that are multiple of 100 5d4c6dc2-cefc-4ac4-92c0-0cd7d40c4203
Error: Cannot convert 'complex' objects with values that are multiple of 100 986c04f2-9bab-448e-9e4f-ca5f488ea69b
Error: Cannot convert 'complex' objects with values that are multiple of 100 f06ff625-d616-4d40-a3bd-7d475f576035
Error: Cannot convert 'complex' objects with values that are multiple of 100 472745be-091c-48d6-9751-5043924286c5
Error: Cannot convert 'complex' objects with values that are multiple of 100 d5de8abb-e10f-41cb-ae57-a30292a0fb6f
Error: Cannot convert 'complex' objects with values that are multiple of 100 3657860c-a998-4546-a4b7-8b51b5e8ec63
Error: Cannot convert 'complex' objects with values that are multiple of 100 cb3429f9-58b5-45d5-828b-f3c0804a5bee
Error: Cannot convert 'complex' objects with values that are multiple of 100 1fcb4146-e595-4d97-b1e8-83d06ce9c4af
Error: Cannot convert 'complex' objects with values that are multiple of 100 dbdb940c-c58f-4f79-911c-28db23c66702
Error: Cannot convert 'complex' objects with values that are multiple of 100 74137202-e0a6-4b85-b1f6-af4173da2298
Error: Cannot convert 'complex' objects with values that are multiple of 100 5c156bf4-2250-416b-8a0a-f52fef157a64
Error: Cannot convert 'complex' objects with values that are multiple of 100 83911a27-b908-4671-b1c1-811e062a3c49
Error: Cannot convert 'complex' objects with values that are multiple of 100 8604f3e2-3c35-482f-99e7-1c4d2dc74910
Error: Cannot convert 'complex' objects with values that are multiple of 100 84a6269b-f2b7-4329-adf4-a81e9fd03a50
Error: Cannot convert 'complex' objects with values that are multiple of 100 be5ea63c-367c-498d-93b6-537692034b64
Error: Cannot convert 'complex' objects with values that are multiple of 100 f21d85b8-9907-46c6-ae34-56ea8d7906e3
Error: Cannot convert 'complex' objects with values that are multiple of 100 e629353b-b4f9-4db0-87f9-6a0b61a5ef0a
Error: Cannot convert 'complex' objects with values that are multiple of 100 fd52b370-517d-4b08-9dee-2515ed011450
Error: Cannot convert 'complex' objects with values that are multiple of 100 3a40e816-7588-4aa8-a6a8-24a0d446d14a
Error: Cannot convert 'complex' objects with values that are multiple of 100 f52ff4c9-1959-44c2-8f67-78ccf0054df0
Error: Cannot convert 'complex' objects with values that are multiple of 100 2dfb648d-736f-4a0b-a6c6-1f1e5194653a
Error: Cannot convert 'complex' objects with values that are multiple of 100 bbe074cf-1b06-476f-94c2-838a5f6f9f5b
Error: Cannot convert 'complex' objects with values that are multiple of 100 6138a61a-092d-4c4c-b4b9-4bbde922bc7a
Error: Cannot convert 'complex' objects with values that are multiple of 100 bc8870f5-917c-4913-b42c-e0a6295c9bda
Error: Cannot convert 'complex' objects with values that are multiple of 100 f51db270-56b3-467c-bb13-e64a824bc947
Error: Cannot convert 'complex' objects with values that are multiple of 100 68e77bca-82b8-4e9d-b7aa-18dc7912476f
Error: Cannot convert 'complex' objects with values that are multiple of 100 a37481ee-b13b-47a8-8d1f-5d28ef77df36
Error: Cannot convert 'complex' objects with values that are multiple of 100 884cfa87-078c-4e94-90a8-88a2d2c09b24
Error: Cannot convert 'complex' objects with values that are multiple of 100 5156d15d-a0aa-4658-998a-44370cc1b798
Error: Cannot convert 'complex' objects with values that are multiple of 100 c4b0c78b-609d-4d3a-b278-3d0e3660093f
Error: Cannot convert 'complex' objects with values that are multiple of 100 8f983501-198d-42bd-a96c-3368a7ae9c76
Error: Cannot convert 'complex' objects with values that are multiple of 100 9331570b-34b1-455a-86ca-53b57975dbd1
Error: Cannot convert 'complex' objects with values that are multiple of 100 fb80fca4-1e11-403b-877e-2277567d4177
Error: Cannot convert 'complex' objects with values that are multiple of 100 8cf4b558-d0ac-49c9-97b3-4096c5ae1066
Error: Cannot convert 'complex' objects with values that are multiple of 100 6a24dbcf-3d4b-4cd1-bf21-228e63d55f40
Error: Cannot convert 'complex' objects with values that are multiple of 100 a36060c7-4132-4fdf-93a8-d9f12d1af131
Error: Cannot convert 'complex' objects with values that are multiple of 100 92ae1330-64a1-49bb-8857-f9688ac1a485
Error: Cannot convert 'complex' objects with values that are multiple of 100 d287c9d6-9ee5-4a93-a39b-638b6c884c54
Error: Cannot convert 'complex' objects with values that are multiple of 100 80e44455-8f2c-4b88-8cde-b81f59036a68
Error: Cannot convert 'complex' objects with values that are multiple of 100 edfc4889-ea05-4631-9c4d-e98030066b28
Error: Cannot convert 'complex' objects with values that are multiple of 100 bbc8a635-9bc5-4049-97a1-30804231a9ab
Error: Cannot convert 'complex' objects with values that are multiple of 100 67d474e6-857d-4704-b135-76855c007b55
Error: Cannot convert 'complex' objects with values that are multiple of 100 fcdbeca7-8fcf-4ed8-8fea-35d7345a84d8
Error: Cannot convert 'complex' objects with values that are multiple of 100 abb69c48-b492-4e60-a46e-d6b72f825016
Error: Cannot convert 'complex' objects with values that are multiple of 100 cfaefb4e-5be1-4944-a838-2608933d4efb
Error: Cannot convert 'complex' objects with values that are multiple of 100 23777806-216b-433e-9c58-3dd420806eab
Error: Cannot convert 'complex' objects with values that are multiple of 100 b86323cf-e4ef-49ed-a0d2-7cc34d46f5e4
Error: Cannot convert 'complex' objects with values that are multiple of 100 8e3cf04e-b591-4a46-b698-382fe6a8537a
Error: Cannot convert 'complex' objects with values that are multiple of 100 c6607bf6-9f8d-4ec5-b182-c334170db3ff
Error: Cannot convert 'complex' objects with values that are multiple of 100 0f3c3bbf-981a-426e-9500-d56b48ab5e28
Error: Cannot convert 'complex' objects with values that are multiple of 100 7b0213e1-d82c-4e75-acb8-e8a8b4bcbb3f
Error: Cannot convert 'complex' objects with values that are multiple of 100 221bebb6-a6ee-49bc-9c42-fb9e4345b8e9
Error: Cannot convert 'complex' objects with values that are multiple of 100 89456437-07d2-4107-a82e-3f8ac91b554b
Error: Cannot convert 'complex' objects with values that are multiple of 100 0136fa8b-8131-41f3-ae8d-efca16e4520e
Error: Cannot convert 'complex' objects with values that are multiple of 100 7ce255d5-6822-4510-a676-338158548f2a
Error: Cannot convert 'complex' objects with values that are multiple of 100 fbbe7e86-4a05-4bf1-b2cd-cd34209d2dbc
Error: Cannot convert 'complex' objects with values that are multiple of 100 088c059b-95c2-4582-a0b5-83a11c8ccd51
Error: Cannot convert 'complex' objects with values that are multiple of 100 3fd4d00e-2e41-4f11-a428-0672cf71da1a
Error: Cannot convert 'complex' objects with values that are multiple of 100 15a51b74-df6e-4ff8-adc1-05746fcb4cb0
Error: Cannot convert 'complex' objects with values that are multiple of 100 ef4bf2ab-5e93-4c51-bd90-3efb7d786e8c
Error: Cannot convert 'complex' objects with values that are multiple of 100 7a76c005-3820-4ec4-a8e5-1c3ae275aad7
Error: Cannot convert 'complex' objects with values that are multiple of 100 e7e33ae6-23b4-4695-8134-21bcd0d89694
Error: Cannot convert 'complex' objects with values that are multiple of 100 15ca89ab-c3cd-4cb5-b7bd-7379453fc7be
Error: Cannot convert 'complex' objects with values that are multiple of 100 f04e2177-c91a-4a68-98dd-75b62efc8a4b
Error: Cannot convert 'complex' objects with values that are multiple of 100 c0b87c63-59d5-4ca1-b9c7-cc287e61e56c
Error: Cannot convert 'complex' objects with values that are multiple of 100 6f208621-8573-4cae-81b2-c8016a2e997f
Error: Cannot convert 'complex' objects with values that are multiple of 100 2fe60964-d054-4d10-ae56-c4520da1dd6a
Error: Cannot convert 'complex' objects with values that are multiple of 100 76e6cb6f-fb1a-46e3-ad21-19e12bb5e613
Error: Cannot convert 'complex' objects with values that are multiple of 100 01d4a881-7d84-47a1-8fed-e7a9a04ed4ae
Error: Cannot convert 'complex' objects with values that are multiple of 100 9be28f4d-4223-4b2a-b1c6-a6166367c71f
Error: Cannot convert 'complex' objects with values that are multiple of 100 f8340992-90c1-48a2-9ed7-ed88c61d5c22
Error: Cannot convert 'complex' objects with values that are multiple of 100 ce0294e7-f2b7-42da-8cc8-349e2227666e
Error: Cannot convert 'complex' objects with values that are multiple of 100 f1f4c570-837b-4d81-918b-a11dcd53f3c9
Error: Cannot convert 'complex' objects with values that are multiple of 100 13828a5a-ac89-4342-8abd-272677d0458b
Error: Cannot convert 'complex' objects with values that are multiple of 100 bd949e12-704e-4e97-a708-68ed1832d917
Error: Cannot convert 'complex' objects with values that are multiple of 100 1c42216e-09c0-4fed-8b13-5d8dffb40631
Error: Cannot convert 'complex' objects with values that are multiple of 100 f48a93c1-3813-446c-9953-3f59e98c8939
Error: Cannot convert 'complex' objects with values that are multiple of 100 aaf6e72f-4ea7-49f0-8892-73be6d378b96
Error: Cannot convert 'complex' objects with values that are multiple of 100 a9c53069-2c3d-4c2a-98e8-072b135a2f70
Error: Cannot convert 'complex' objects with values that are multiple of 100 054e0b42-d45c-4d05-8ea6-3f1851c49ec1
Error: Cannot convert 'complex' objects with values that are multiple of 100 ef6288f6-eed1-4fcc-866c-8940eb11f896
Error: Cannot convert 'complex' objects with values that are multiple of 100 66f6a8a1-64a8-493d-a29b-9a4f5a99bb68
Error: Cannot convert 'complex' objects with values that are multiple of 100 91f2c315-ba90-41c4-b02b-d5a9eb4c3852
Error: Cannot convert 'complex' objects with values that are multiple of 100 120e5898-327e-4e62-82aa-1356ceb6a54a
Error: Cannot convert 'complex' objects with values that are multiple of 100 f195df49-c8f7-4091-bc6e-705696c8c136
Error: Cannot convert 'complex' objects with values that are multiple of 100 9535ef93-8862-4076-9ad0-de999abcb16e
Error: Cannot convert 'complex' objects with values that are multiple of 100 1b5866c8-1c99-45c9-a885-fa96e5b523c4
Error: Cannot convert 'complex' objects with values that are multiple of 100 a0f3afab-e162-4d89-8651-90e9299047f8
Error: Cannot convert 'complex' objects with values that are multiple of 100 d6351638-dbe6-44da-8a44-437940216574
Error: Cannot convert 'complex' objects with values that are multiple of 100 6ea2eee3-4299-4ce4-9a50-75b7c6f4f8d5
Error: Cannot convert 'complex' objects with values that are multiple of 100 72df4f80-c8fb-49da-81e6-5d36d86d71cd
Error: Cannot convert 'complex' objects with values that are multiple of 100 fb773d4c-df4d-45ac-b8ec-cfc47cd40680
Error: Cannot convert 'complex' objects with values that are multiple of 100 8161541b-c66e-42b9-8ca7-6b6ad623daf2
Error: Cannot convert 'complex' objects with values that are multiple of 100 3b896178-3df5-4456-953e-ece5d423f004
Error: Cannot convert 'complex' objects with values that are multiple of 100 f98845fc-d241-439a-8a71-a1ce3613c9e3
Error: Cannot convert 'complex' objects with values that are multiple of 100 42edb68d-ce78-4ed2-bb2b-ec9f158cbff2
Error: Cannot convert 'complex' objects with values that are multiple of 100 ec8ac091-4a1f-4337-8ebc-c1d719ad90ac
Error: Cannot convert 'complex' objects with values that are multiple of 100 04e5624f-6bae-41c5-a949-8205288cdbd3
Error: Cannot convert 'complex' objects with values that are multiple of 100 0af4b4b9-ca28-4755-913e-4b1b61d94039
Error: Cannot convert 'complex' objects with values that are multiple of 100 011397f1-5c24-495a-9e67-d5ad22ca2a92
Error: Cannot convert 'complex' objects with values that are multiple of 100 a6a3b031-4a82-490c-8892-64419bb81b24
Error: Cannot convert 'complex' objects with values that are multiple of 100 3a433149-1136-4b3f-8791-b0a47aed069a
Error: Cannot convert 'complex' objects with values that are multiple of 100 e846f64c-a4aa-44be-936a-ecae01e2d629
Error: Cannot convert 'complex' objects with values that are multiple of 100 a3e66e1e-1aaf-4f74-bb0d-25fd99f20a70
Error: Cannot convert 'complex' objects with values that are multiple of 100 a14fc4c8-c97f-4e8d-961f-c95dbe2e757a
Error: Cannot convert 'complex' objects with values that are multiple of 100 2033d1b9-c919-4c2f-b7b3-19cb777d7bab
Error: Cannot convert 'complex' objects with values that are multiple of 100 d2097d01-845d-4fc0-aa90-f5757b3e27d2
Error: Cannot convert 'complex' objects with values that are multiple of 100 25c64035-a9ef-4ffd-a953-a6d8c507252d
Error: Cannot convert 'complex' objects with values that are multiple of 100 84611edc-45d0-4fe2-9ed6-de9c2c74e2f8
Error: Cannot convert 'complex' objects with values that are multiple of 100 6d17d320-38c0-4522-923f-fbe30f569b2b
Error: Cannot convert 'complex' objects with values that are multiple of 100 fc75e144-f1a8-4c0d-b06d-89b44bfa6bc0
Error: Cannot convert 'complex' objects with values that are multiple of 100 eda7b278-e7d2-442d-93e6-1a9c5d5aa1de
Error: Cannot convert 'complex' objects with values that are multiple of 100 7ad3a22f-aa7c-40bf-bda5-5b4528d5f273
Error: Cannot convert 'complex' objects with values that are multiple of 100 9db94c5e-240e-4830-9276-2bb36ed13c33
Error: Cannot convert 'complex' objects with values that are multiple of 100 058799ce-70ee-4a9f-a6e3-5659502ad4c5
Error: Cannot convert 'complex' objects with values that are multiple of 100 69efc0de-0af5-4b6b-a27a-91059f59cfcb
Error: Cannot convert 'complex' objects with values that are multiple of 100 c3e02499-4e74-4a87-b919-ff9cd643d200
Error: Cannot convert 'complex' objects with values that are multiple of 100 579a20b1-4787-4e80-a952-4c129f6d4eeb
Error: Cannot convert 'complex' objects with values that are multiple of 100 7343c469-1913-452d-8049-c293c082d14c
Error: Cannot convert 'complex' objects with values that are multiple of 100 fa839a15-6ce1-472e-9bfb-0bee991915f7
Error: Cannot convert 'complex' objects with values that are multiple of 100 0c7b123e-2eca-455b-99b3-2a611c3b83a4
Error: Cannot convert 'complex' objects with values that are multiple of 100 168a6f23-12c9-4b24-b259-ea845c6353e8
Error: Cannot convert 'complex' objects with values that are multiple of 100 89f27fc8-b00d-4bf1-8035-273dd9b88829
Error: Cannot convert 'complex' objects with values that are multiple of 100 e4f7f5f4-77e4-4b4e-b078-29a043e65857
Error: Cannot convert 'complex' objects with values that are multiple of 100 d6645dd1-9e55-4302-b1c4-01229013a0b3
Error: Cannot convert 'complex' objects with values that are multiple of 100 0f57c881-102c-41bf-af6a-64206ecc2e5c
Error: Cannot convert 'complex' objects with values that are multiple of 100 d7b235dc-57ed-48eb-89d4-5b951aa7fdd7
Error: Cannot convert 'complex' objects with values that are multiple of 100 68152075-cca5-45d4-9ea4-a32c10a329f4
Error: Cannot convert 'complex' objects with values that are multiple of 100 334a960e-05bf-4ec3-aca4-66ea43f35dde
Error: Cannot convert 'complex' objects with values that are multiple of 100 14c75ce9-5168-4af0-b2d7-aeb81610fe56
Error: Cannot convert 'complex' objects with values that are multiple of 100 71f0c7d3-0895-4f9e-9659-cc9700088b71
Error: Cannot convert 'complex' objects with values that are multiple of 100 b6370d43-6256-4b6f-99ce-5a4ba3c0e751
Error: Cannot convert 'complex' objects with values that are multiple of 100 962eabc8-e3b0-49d9-bda1-96fe7dd8f45c
Error: Cannot convert 'complex' objects with values that are multiple of 100 a8a40bf1-9c25-4335-9628-1c8510acb810
Error: Cannot convert 'complex' objects with values that are multiple of 100 154718fc-9960-4da1-ba6e-e4559317fc5e
Error: Cannot convert 'complex' objects with values that are multiple of 100 ace890a7-45f5-47d3-b7b6-b7b22930bc56
Error: Cannot convert 'complex' objects with values that are multiple of 100 38dea44f-20f9-4555-9e98-5ea1102863ee
Error: Cannot convert 'complex' objects with values that are multiple of 100 54f0f771-7a75-43ec-94c6-7533001aef7c
Error: Cannot convert 'complex' objects with values that are multiple of 100 cee345bf-a0cf-4043-ae1c-966db0435a74
Error: Cannot convert 'complex' objects with values that are multiple of 100 989bac58-0dd2-4add-ad37-39df470bed20
Error: Cannot convert 'complex' objects with values that are multiple of 100 cc31af7e-4b73-43f6-bf38-b6da18a57684
Error: Cannot convert 'complex' objects with values that are multiple of 100 3e432e82-f4e1-4190-ba02-2db306f839a4
Error: Cannot convert 'complex' objects with values that are multiple of 100 f4340d96-f59b-4a3b-b58e-39f34bae8e9d
Error: Cannot convert 'complex' objects with values that are multiple of 100 b992aa41-b551-48de-b50a-e0fdc89842f9
Error: Cannot convert 'complex' objects with values that are multiple of 100 ac6e8a09-fdb4-40f1-8bcf-889adbdd50e1
Error: Cannot convert 'complex' objects with values that are multiple of 100 28f7ee5e-dfb2-4b09-ab9b-178491155999
Error: Cannot convert 'complex' objects with values that are multiple of 100 2d967a4e-b27a-4dd8-8c21-95c11591ee94
Error: Cannot convert 'complex' objects with values that are multiple of 100 9cdc324a-16a9-44e1-8e09-4e2d9d0c77ee
Error: Cannot convert 'complex' objects with values that are multiple of 100 8194569b-33da-4ef4-b2cd-e6eaf0f1d93b
Error: Cannot convert 'complex' objects with values that are multiple of 100 37d88654-b781-4eb3-afd0-08eb7c58879f
Error: Cannot convert 'complex' objects with values that are multiple of 100 e75e1899-b1a5-41cc-8884-8dd40912cce7
Error: Cannot convert 'complex' objects with values that are multiple of 100 eef9e018-14c1-4a16-9a1c-f9d2720e666e
Error: Cannot convert 'complex' objects with values that are multiple of 100 1bcc0791-cfb7-4cb8-bf45-5243ea7b3664
Error: Cannot convert 'complex' objects with values that are multiple of 100 93142177-ac28-4401-8f9d-08d824f5a72e
Error: Cannot convert 'complex' objects with values that are multiple of 100 4ec64cb8-dcff-40f0-988a-4fde32042601
Error: Cannot convert 'complex' objects with values that are multiple of 100 5b1c8aa1-6c04-4add-a13b-b9e6e2b60619
Error: Cannot convert 'complex' objects with values that are multiple of 100 c64249a4-7495-421c-b3d1-a663e2ae2413
Error: Cannot convert 'complex' objects with values that are multiple of 100 0824c58b-04d5-4e85-8b7b-01820362ad7d
Error: Cannot convert 'complex' objects with values that are multiple of 100 b93dcb07-9ead-434b-b374-d6934b485b06
Error: Cannot convert 'complex' objects with values that are multiple of 100 d8d6e299-fe26-47d7-b9ef-083781549d89
Error: Cannot convert 'complex' objects with values that are multiple of 100 9e3405fa-7373-4bab-be56-716c90b2eecf
Error: Cannot convert 'complex' objects with values that are multiple of 100 b8980b2d-1240-4e0e-a7e7-afc9b09f8fcc
Error: Cannot convert 'complex' objects with values that are multiple of 100 eb637eaa-0d7f-4014-86bb-6e59781bcdf4
Error: Cannot convert 'complex' objects with values that are multiple of 100 62a04af9-ec82-435b-bdfc-1939db304752
Error: Cannot convert 'complex' objects with values that are multiple of 100 d5078a2b-6a1c-491c-8a45-d2fed7132590"
`;

View file

@ -0,0 +1,39 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import { getBaselineDocuments } from '../kibana_migrator_test_kit.fixtures';
import {
BASELINE_DOCUMENTS_PER_TYPE_1K,
BASELINE_DOCUMENTS_PER_TYPE_500K,
BASELINE_ELASTICSEARCH_VERSION,
BASELINE_TEST_ARCHIVE_1K,
BASELINE_TEST_ARCHIVE_500K,
createBaselineArchive,
} from '../kibana_migrator_archive_utils';
/**
* Enable and execute this test ONLY IN YOUR DEV MACHINE, in order to build new test packages
*/
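// To build a new archive, flip `describe.skip` to `describe` below and run this file with
// the repo's Jest runner (e.g. `node scripts/jest <path-to-this-file>`; the exact invocation
// may differ in your setup), then restore the `.skip` before committing.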
describe.skip('migration tests toolkit', () => {
it('can create a 1k documents ZIP archive', async () => {
await createBaselineArchive({
esVersion: BASELINE_ELASTICSEARCH_VERSION,
documents: getBaselineDocuments({ documentsPerType: BASELINE_DOCUMENTS_PER_TYPE_1K }),
dataArchive: BASELINE_TEST_ARCHIVE_1K,
});
});
it('can create a 500k documents ZIP archive', async () => {
await createBaselineArchive({
esVersion: BASELINE_ELASTICSEARCH_VERSION,
documents: getBaselineDocuments({ documentsPerType: BASELINE_DOCUMENTS_PER_TYPE_500K }),
dataArchive: BASELINE_TEST_ARCHIVE_500K,
});
});
});
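// The generated ZIPs are the `dataArchive` packages consumed by the migration tests,
// e.g. `startElasticsearch({ dataArchive: BASELINE_TEST_ARCHIVE_500K })`; they are assumed
// to be committed alongside the tests that reference them.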

View file

@ -0,0 +1,347 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import { join } from 'path';
import { omit } from 'lodash';
import JSON5 from 'json5';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import type { MigrationResult } from '@kbn/core-saved-objects-base-server-internal';
import {
defaultKibanaIndex,
defaultKibanaTaskIndex,
startElasticsearch,
getAggregatedTypesCount,
type KibanaMigratorTestKit,
readLog,
clearLog,
currentVersion,
nextMinor,
} from '../kibana_migrator_test_kit';
import {
BASELINE_COMPLEX_DOCUMENTS_500K_AFTER,
BASELINE_DOCUMENTS_PER_TYPE_500K,
BASELINE_TEST_ARCHIVE_500K,
} from '../kibana_migrator_archive_utils';
import {
getReindexingBaselineTypes,
getReindexingMigratorTestKit,
getUpToDateMigratorTestKit,
} from '../kibana_migrator_test_kit.fixtures';
import { delay, getDocVersion } from '../test_utils';
import { expectDocumentsMigratedToHighestVersion } from '../kibana_migrator_test_kit.expect';
const logFilePath = join(__dirname, 'v2_migration.log');
const docVersion = getDocVersion();
describe('v2 migration', () => {
let esServer: TestElasticsearchUtils;
beforeAll(async () => {
esServer = await startElasticsearch({ dataArchive: BASELINE_TEST_ARCHIVE_500K });
});
afterAll(async () => {
if (esServer) {
await esServer.stop();
await delay(5); // give it a few seconds... cause we always do ¯\_(ツ)_/¯
}
});
describe('to the current stack version', () => {
let upToDateKit: KibanaMigratorTestKit;
let migrationResults: MigrationResult[];
beforeAll(async () => {
await clearLog(logFilePath);
upToDateKit = await getUpToDateMigratorTestKit({
logFilePath,
kibanaVersion: currentVersion,
});
migrationResults = await upToDateKit.runMigrations();
});
it('skips UPDATE_TARGET_MAPPINGS_PROPERTIES if there are no changes in the mappings', async () => {
const logs = await readLog(logFilePath);
expect(logs).not.toMatch('CREATE_NEW_TARGET');
expect(logs).toMatch(
`[${defaultKibanaIndex}] CHECK_TARGET_MAPPINGS -> CHECK_VERSION_INDEX_READY_ACTIONS`
);
expect(logs).toMatch(
`[${defaultKibanaTaskIndex}] CHECK_TARGET_MAPPINGS -> CHECK_VERSION_INDEX_READY_ACTIONS`
);
expect(logs).not.toMatch('UPDATE_TARGET_MAPPINGS_PROPERTIES');
expect(logs).not.toMatch('UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK');
expect(logs).not.toMatch('UPDATE_TARGET_MAPPINGS_META');
});
it(`returns a 'patched' status for each SO index`, () => {
// omit elapsedMs as it varies in each execution
expect(migrationResults.map((result) => omit(result, 'elapsedMs'))).toMatchInlineSnapshot(`
Array [
Object {
"destIndex": ".kibana_migrator_${currentVersion}_001",
"status": "patched",
},
Object {
"destIndex": ".kibana_migrator_tasks_${currentVersion}_001",
"status": "patched",
},
]
`);
});
it('each migrator takes less than 10 seconds', () => {
const painfulMigrator = (migrationResults as Array<{ elapsedMs?: number }>).find(
({ elapsedMs }) => elapsedMs && elapsedMs > 10_000
);
expect(painfulMigrator).toBeUndefined();
});
});
describe('to a newer stack version', () => {
describe('with unknown types', () => {
let unknownTypesKit: KibanaMigratorTestKit;
let logs: string;
beforeAll(async () => {
await clearLog(logFilePath);
unknownTypesKit = await getReindexingMigratorTestKit({
logFilePath,
// filter out the 'task' type so its migrator is not spawned for this test
types: getReindexingBaselineTypes(true).filter(({ name }) => name !== 'task'),
settings: {
migrations: {
discardUnknownObjects: currentVersion, // deliberately the current version instead of the actual target, 'nextMinor'
},
},
});
});
it('fails if Kibana is not configured to discard unknown objects', async () => {
await expect(unknownTypesKit.runMigrations()).rejects.toThrowErrorMatchingInlineSnapshot(`
"Unable to complete saved object migrations for the [.kibana_migrator] index: Migration failed because some documents were found which use unknown saved object types: deprecated
To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.
Please refer to https://www.elastic.co/guide/en/kibana/${docVersion}/resolve-migrations-failures.html for more information."
`);
logs = await readLog(logFilePath);
expect(logs).toMatch(
'The flag `migrations.discardUnknownObjects` is defined but does not match the current kibana version; unknown objects will NOT be discarded.'
);
expect(logs).toMatch(
`[${defaultKibanaIndex}] Migration failed because some documents were found which use unknown saved object types: deprecated`
);
expect(logs).toMatch(`[${defaultKibanaIndex}] CHECK_UNKNOWN_DOCUMENTS -> FATAL.`);
});
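// Hypothetical fix for the failure above: point the flag at the migration's target
// version, e.g. `settings: { migrations: { discardUnknownObjects: nextMinor } }`.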
});
describe('with transform errors', () => {
let transformErrorsKit: KibanaMigratorTestKit;
let logs: string;
beforeAll(async () => {
await clearLog(logFilePath);
transformErrorsKit = await getReindexingMigratorTestKit({
logFilePath,
// filter out the 'task' type so its migrator is not spawned for this test
types: getReindexingBaselineTypes(true).filter(({ name }) => name !== 'task'),
settings: {
migrations: {
discardCorruptObjects: currentVersion, // deliberately the current version instead of the actual target, 'nextMinor'
},
},
});
});
it('collects corrupt saved object documents across batches', async () => {
try {
await transformErrorsKit.runMigrations();
} catch (error) {
const lines = error.message
.split('\n')
.filter((line: string) => line.includes(`'complex'`))
.join('\n');
expect(lines).toMatchSnapshot();
}
});
it('fails if Kibana is not configured to discard transform errors', async () => {
logs = await readLog(logFilePath);
expect(logs).toMatch(
`Cannot convert 'complex' objects with values that are multiple of 100`
);
expect(logs).toMatch(`[${defaultKibanaIndex}] REINDEX_SOURCE_TO_TEMP_READ -> FATAL.`);
});
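// Similarly, a hypothetical `settings: { migrations: { discardCorruptObjects: nextMinor } }`
// would let the migration discard the failing 'complex' objects and proceed.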
it('closes reindex PIT upon failure', async () => {
const lineWithPit = logs
.split('\n')
.find((line) =>
line.includes(`[${defaultKibanaIndex}] REINDEX_SOURCE_TO_TEMP_OPEN_PIT PitId:`)
);
expect(lineWithPit).toBeTruthy();
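// each log line is a JSON5 record whose message reads
// '[<index>] REINDEX_SOURCE_TO_TEMP_OPEN_PIT PitId:<id>', so the id is the part after ':'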
const id = JSON5.parse(lineWithPit!).message.split(':')[1];
expect(id).toBeTruthy();
await expect(
transformErrorsKit.client.search({
pit: { id },
})
// searching with a closed PIT throws a search_phase_execution_exception
).rejects.toThrow(/search_phase_execution_exception/);
});
});
describe('configured to discard transform errors and unknown types', () => {
let kit: KibanaMigratorTestKit;
let migrationResults: MigrationResult[];
let logs: string;
beforeAll(async () => {
await clearLog(logFilePath);
kit = await getReindexingMigratorTestKit({
logFilePath,
filterDeprecated: true,
});
migrationResults = await kit.runMigrations();
logs = await readLog(logFilePath);
});
it('migrates documents to the highest version', async () => {
await expectDocumentsMigratedToHighestVersion(kit.client, [
defaultKibanaIndex,
defaultKibanaTaskIndex,
]);
});
describe('a migrator performing a compatible upgrade migration', () => {
it('updates target mappings when mappings have changed', () => {
expect(logs).toMatch(
`[${defaultKibanaTaskIndex}] CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES.`
);
expect(logs).toMatch(
`[${defaultKibanaTaskIndex}] UPDATE_TARGET_MAPPINGS_PROPERTIES -> UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK.`
);
expect(logs).toMatch(
`[${defaultKibanaTaskIndex}] UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK -> UPDATE_TARGET_MAPPINGS_META.`
);
expect(logs).toMatch(
`[${defaultKibanaTaskIndex}] UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS.`
);
});
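// In a compatible migration the source index is reused, so the version alias is
// swapped during PREPARE_COMPATIBLE_MIGRATION and MARK_VERSION_INDEX_READY never runs.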
it('updates the version aliases during the PREPARE_COMPATIBLE_MIGRATION step', () => {
expect(logs).toMatch(`[${defaultKibanaTaskIndex}] PREPARE_COMPATIBLE_MIGRATION`);
expect(logs).not.toMatch(`[${defaultKibanaTaskIndex}] MARK_VERSION_INDEX_READY`);
expect(logs).toMatch(
`[${defaultKibanaTaskIndex}] CHECK_VERSION_INDEX_READY_ACTIONS -> DONE.`
);
});
});
describe('a migrator performing a reindexing migration', () => {
describe('when an index contains SO types with incompatible mappings', () => {
it('executes the reindexing migration steps', () => {
expect(logs).toMatch(`[${defaultKibanaIndex}] INIT -> WAIT_FOR_YELLOW_SOURCE.`);
expect(logs).toMatch(
`[${defaultKibanaIndex}] WAIT_FOR_YELLOW_SOURCE -> UPDATE_SOURCE_MAPPINGS_PROPERTIES.`
);
expect(logs).toMatch(
`[${defaultKibanaIndex}] UPDATE_SOURCE_MAPPINGS_PROPERTIES -> CHECK_CLUSTER_ROUTING_ALLOCATION.`
);
expect(logs).toMatch(
`[${defaultKibanaIndex}] CHECK_CLUSTER_ROUTING_ALLOCATION -> CHECK_UNKNOWN_DOCUMENTS.`
);
expect(logs).toMatch(
`[${defaultKibanaIndex}] CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES.`
);
expect(logs).toMatch(
`[${defaultKibanaIndex}] UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS.`
);
expect(logs).toMatch(
`[${defaultKibanaIndex}] CHECK_VERSION_INDEX_READY_ACTIONS -> MARK_VERSION_INDEX_READY.`
);
expect(logs).toMatch(`[${defaultKibanaIndex}] MARK_VERSION_INDEX_READY -> DONE.`);
expect(logs).not.toMatch(`[${defaultKibanaIndex}] CREATE_NEW_TARGET`);
expect(logs).not.toMatch(`[${defaultKibanaIndex}] CLEANUP_UNKNOWN_AND_EXCLUDED`);
expect(logs).not.toMatch(`[${defaultKibanaIndex}] PREPARE_COMPATIBLE_MIGRATION`);
});
});
describe('copies the right documents over to the target indices', () => {
let primaryIndexCounts: Record<string, number>;
let taskIndexCounts: Record<string, number>;
beforeAll(async () => {
primaryIndexCounts = await getAggregatedTypesCount(kit.client, defaultKibanaIndex);
taskIndexCounts = await getAggregatedTypesCount(kit.client, defaultKibanaTaskIndex);
});
it('copies documents to the right indices depending on their types', () => {
expect(primaryIndexCounts.basic).toBeDefined();
expect(primaryIndexCounts.complex).toBeDefined();
expect(primaryIndexCounts.task).not.toBeDefined();
expect(taskIndexCounts.basic).not.toBeDefined();
expect(taskIndexCounts.complex).not.toBeDefined();
expect(taskIndexCounts.task).toBeDefined();
});
it('discards REMOVED_TYPES', () => {
expect(primaryIndexCounts.server).not.toBeDefined();
expect(taskIndexCounts.server).not.toBeDefined();
});
it('discards unknown types', () => {
expect(primaryIndexCounts.deprecated).not.toBeDefined();
expect(taskIndexCounts.deprecated).not.toBeDefined();
});
it('copies all of the documents', () => {
expect(primaryIndexCounts.basic).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
expect(taskIndexCounts.task).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
});
it('executes the excludeOnUpgrade hook', () => {
expect(primaryIndexCounts.complex).toEqual(BASELINE_COMPLEX_DOCUMENTS_500K_AFTER);
});
});
it('returns a migrated status for each SO index', () => {
// omit elapsedMs as it varies in each execution
expect(migrationResults.map((result) => omit(result, 'elapsedMs')))
.toMatchInlineSnapshot(`
Array [
Object {
"destIndex": ".kibana_migrator_${nextMinor}_001",
"sourceIndex": ".kibana_migrator_${currentVersion}_001",
"status": "migrated",
},
Object {
"destIndex": ".kibana_migrator_tasks_${currentVersion}_001",
"sourceIndex": ".kibana_migrator_tasks_${currentVersion}_001",
"status": "migrated",
},
]
`);
});
it('each migrator takes less than 60 seconds', () => {
const painfulMigrator = (migrationResults as Array<{ elapsedMs?: number }>).find(
({ elapsedMs }) => elapsedMs && elapsedMs > 60_000
);
expect(painfulMigrator).toBeUndefined();
});
});
});
});
});

View file

@ -22,6 +22,7 @@ import { REPO_ROOT } from '@kbn/repo-info';
import { getEnvOptions } from '@kbn/config-mocks';
import { LogRecord } from '@kbn/logging';
import { retryAsync } from '@kbn/core-saved-objects-migration-server-mocks';
import { delay } from '../test_utils';
const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
const targetIndex = `.kibana_${kibanaVersion}_001`;
@ -89,6 +90,7 @@ describe('migration v2', () => {
}
if (esServer) {
await esServer.stop();
await delay(10);
}
});

View file

@ -1,126 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import fs from 'fs/promises';
import JSON5 from 'json5';
import {
createTestServers,
createRootWithCorePlugins,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import { retryAsync } from '@kbn/core-saved-objects-migration-server-mocks';
import { Root } from '@kbn/core-root-server-internal';
const logFilePath = Path.join(__dirname, 'batch_size_bytes_exceeds_es_content_length.log');
async function removeLogFile() {
// ignore errors if it doesn't exist
await fs.unlink(logFilePath).catch(() => void 0);
}
// Failing ES Promotion: https://github.com/elastic/kibana/issues/158313
describe.skip('migration v2', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
let startES: () => Promise<TestElasticsearchUtils>;
beforeAll(async () => {
await removeLogFile();
});
beforeEach(() => {
({ startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
dataArchive: Path.join(
__dirname,
'..',
'archives',
'7.14.0_xpack_sample_saved_objects.zip'
),
esArgs: ['http.max_content_length=1mb'],
},
},
}));
});
afterEach(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
it('fails with a descriptive message when maxBatchSizeBytes exceeds ES http.max_content_length', async () => {
root = createRoot({ maxBatchSizeBytes: 1715329 });
esServer = await startES();
await root.preboot();
await root.setup();
await expect(root.start()).rejects.toMatchInlineSnapshot(
`[Error: Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.]`
);
await retryAsync(
async () => {
const logFileContent = await fs.readFile(logFilePath, 'utf-8');
const records = logFileContent
.split('\n')
.filter(Boolean)
.map((str) => JSON5.parse(str)) as any[];
expect(
records.find((rec) =>
rec.message.startsWith(
`Reason: Unable to complete saved object migrations for the [.kibana] index: While indexing a batch of saved objects, Elasticsearch returned a 413 Request Entity Too Large exception. Ensure that the Kibana configuration option 'migrations.maxBatchSizeBytes' is set to a value that is lower than or equal to the Elasticsearch 'http.max_content_length' configuration option.`
)
)
).toBeDefined();
},
{ retryAttempts: 10, retryDelayMs: 200 }
);
});
});
function createRoot(options: { maxBatchSizeBytes?: number }) {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
batchSize: 1000,
maxBatchSizeBytes: options.maxBatchSizeBytes,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss: false,
}
);
}

View file

@ -1,167 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import fs from 'fs/promises';
import { Env } from '@kbn/config';
import { REPO_ROOT } from '@kbn/repo-info';
import { getEnvOptions } from '@kbn/config-mocks';
import { Root } from '@kbn/core-root-server-internal';
import {
createRootWithCorePlugins,
createTestServers,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
const logFilePath = Path.join(__dirname, 'check_target_mappings.log');
describe('migration v2 - CHECK_TARGET_MAPPINGS', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
let logs: string;
beforeEach(async () => {
await fs.unlink(logFilePath).catch(() => {});
});
afterEach(async () => {
await root?.shutdown();
await esServer?.stop();
});
it('is not run for new installations', async () => {
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
},
},
});
root = createRoot();
esServer = await startES();
await root.preboot();
await root.setup();
await root.start();
// Check for migration steps present in the logs
logs = await fs.readFile(logFilePath, 'utf-8');
expect(logs).toMatch('CREATE_NEW_TARGET');
expect(logs).not.toMatch('CHECK_TARGET_MAPPINGS');
});
describe('when the indices are aligned with the stack version', () => {
it('skips UPDATE_TARGET_MAPPINGS_PROPERTIES if there are no changes in the mappings', async () => {
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
},
},
});
esServer = await startES();
// start Kibana for the first time to create the system indices
root = createRoot();
await root.preboot();
await root.setup();
await root.start();
// stop Kibana and remove logs
await root.shutdown();
await fs.unlink(logFilePath).catch(() => {});
root = createRoot();
await root.preboot();
await root.setup();
await root.start();
// Check for migration steps present in the logs
logs = await fs.readFile(logFilePath, 'utf-8');
expect(logs).not.toMatch('CREATE_NEW_TARGET');
expect(logs).toMatch('CHECK_TARGET_MAPPINGS -> CHECK_VERSION_INDEX_READY_ACTIONS');
expect(logs).not.toMatch('UPDATE_TARGET_MAPPINGS_PROPERTIES');
expect(logs).not.toMatch('UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK');
expect(logs).not.toMatch('UPDATE_TARGET_MAPPINGS_META');
});
});
describe('when upgrading to a newer stack version', () => {
const currentVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
it('runs UPDATE_TARGET_MAPPINGS_PROPERTIES when mappings have changed', async () => {
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
dataArchive: Path.join(__dirname, '..', 'archives', '8.4.0_with_sample_data_logs.zip'),
},
},
});
esServer = await startES();
// start Kibana for the first time to create the system indices
root = createRoot(currentVersion); // discard saved objects whose types have become unknown since 8.4.0
await root.preboot();
await root.setup();
await root.start();
// Check for migration steps present in the logs
logs = await fs.readFile(logFilePath, 'utf-8');
expect(logs).not.toMatch('[.kibana] CREATE_NEW_TARGET');
expect(logs).toMatch('CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES');
expect(logs).toMatch(
'UPDATE_TARGET_MAPPINGS_PROPERTIES -> UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK'
);
expect(logs).toMatch(
'UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK -> UPDATE_TARGET_MAPPINGS_META'
);
expect(logs).toMatch('UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS');
expect(logs).toMatch('Migration completed');
});
});
});
function createRoot(discardUnknownObjects?: string, customKibanaVersion?: string) {
return createRootWithCorePlugins(
{
migrations: {
discardUnknownObjects,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss: true,
},
customKibanaVersion
);
}

View file

@ -1,192 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import JSON5 from 'json5';
import { type TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import { SavedObjectsType } from '@kbn/core-saved-objects-server';
import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { getMigrationDocLink } from '../test_utils';
import {
clearLog,
currentVersion,
defaultKibanaIndex,
getKibanaMigratorTestKit,
nextMinor,
startElasticsearch,
} from '../kibana_migrator_test_kit';
const migrationDocLink = getMigrationDocLink().resolveMigrationFailures;
const logFilePath = Path.join(__dirname, 'cleanup.log');
const asyncReadFile = Util.promisify(Fs.readFile);
describe('migration v2', () => {
let esServer: TestElasticsearchUtils['es'];
let esClient: ElasticsearchClient;
beforeAll(async () => {
esServer = await startElasticsearch();
});
beforeEach(async () => {
esClient = await setupBaseline();
await clearLog(logFilePath);
});
it('cleans up if migration fails', async () => {
const { runMigrations } = await setupNextMinor();
await expect(runMigrations()).rejects.toThrowErrorMatchingInlineSnapshot(`
"Unable to complete saved object migrations for the [${defaultKibanaIndex}] index: Migrations failed. Reason: 1 corrupt saved object documents were found: corrupt:2baf4de0-a6d4-11ed-ba5a-39196fc76e60
To allow migrations to proceed, please delete or fix these documents.
Note that you can configure Kibana to automatically discard corrupt documents and transform errors for this migration.
Please refer to ${migrationDocLink} for more information."
`);
const logFileContent = await asyncReadFile(logFilePath, 'utf-8');
const records = logFileContent
.split('\n')
.filter(Boolean)
.map((str) => JSON5.parse(str));
const logRecordWithPit = records.find(
(rec) => rec.message === `[${defaultKibanaIndex}] REINDEX_SOURCE_TO_TEMP_OPEN_PIT RESPONSE`
);
expect(logRecordWithPit).toBeTruthy();
});
afterEach(async () => {
await esClient?.indices.delete({ index: `${defaultKibanaIndex}_${currentVersion}_001` });
});
afterAll(async () => {
await esServer?.stop();
});
});
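// Seeds a current-version index with two 'complex' objects, one of them stored under an
// id whose prefix ('corrupt:') does not match its type, making it a corrupt document.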
const setupBaseline = async () => {
const typesCurrent: SavedObjectsType[] = [
{
name: 'complex',
hidden: false,
namespaceType: 'agnostic',
mappings: {
properties: {
name: { type: 'text' },
value: { type: 'integer' },
},
},
migrations: {},
},
];
const savedObjects = [
{
id: 'complex:4baf4de0-a6d4-11ed-ba5a-39196fc76e60',
body: {
type: 'complex',
complex: {
name: 'foo',
value: 5,
},
references: [],
coreMigrationVersion: currentVersion,
updated_at: '2023-02-07T11:04:44.914Z',
created_at: '2023-02-07T11:04:44.914Z',
},
},
{
id: 'corrupt:2baf4de0-a6d4-11ed-ba5a-39196fc76e60', // incorrect id => corrupt object
body: {
type: 'complex',
complex: {
name: 'bar',
value: 3,
},
references: [],
coreMigrationVersion: currentVersion,
updated_at: '2023-02-07T11:04:44.914Z',
created_at: '2023-02-07T11:04:44.914Z',
},
},
];
const { runMigrations, client } = await getKibanaMigratorTestKit({
types: typesCurrent,
logFilePath,
});
await runMigrations();
// inject the saved objects directly with the ES client (one of them intentionally corrupt)
await Promise.all(
savedObjects.map((savedObject) =>
client.create({
index: defaultKibanaIndex,
refresh: 'wait_for',
...savedObject,
})
)
);
return client;
};
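// Registers the next minor's view of 'complex', with incompatible mappings
// (text -> keyword, integer -> long) that force a reindexing migration.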
const setupNextMinor = async () => {
const typesNextMinor: SavedObjectsType[] = [
{
name: 'complex',
hidden: false,
namespaceType: 'agnostic',
mappings: {
properties: {
name: { type: 'keyword' },
value: { type: 'long' },
},
},
migrations: {
[nextMinor]: (doc) => doc,
},
},
];
return await getKibanaMigratorTestKit({
types: typesNextMinor,
kibanaVersion: nextMinor,
logFilePath,
settings: {
migrations: {
skip: false,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
appenders: ['file'],
level: 'debug', // DEBUG logs are required to retrieve the PIT _id from the action response logs
},
],
},
},
});
};

View file

@ -1,222 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import {
createTestServers,
createRootWithCorePlugins,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import { Root } from '@kbn/core-root-server-internal';
import { getMigrationDocLink } from '../test_utils';
const migrationDocLink = getMigrationDocLink().resolveMigrationFailures;
const logFilePath = Path.join(__dirname, 'collects_corrupt_docs.log');
const asyncUnlink = Util.promisify(Fs.unlink);
async function removeLogFile() {
// ignore errors if it doesn't exist
await asyncUnlink(logFilePath).catch(() => void 0);
}
describe('migration v2 with corrupt saved object documents', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
beforeAll(async () => {
await removeLogFile();
});
afterAll(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
it('collects corrupt saved object documents across batches', async () => {
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// contains 4 `foo` objects, all with a `migrationVersion` of `7.13.0`
// - foo:1 and foo:2 have correct values for their `number` property (13 and 42 respectively)
// - foo:3 and foo:4 don't have the property, and will fail during the `7.14.0` registered migration
// contains migrated index with 8.0 aliases to skip migration, but run outdated doc search
dataArchive: Path.join(
__dirname,
'..',
'archives',
'8.0.0_document_migration_failure.zip'
),
},
},
});
root = createRoot();
esServer = await startES();
await root.preboot();
const coreSetup = await root.setup();
coreSetup.savedObjects.registerType({
name: 'foo',
hidden: false,
mappings: {
properties: {
number: { type: 'integer' },
},
},
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => {
if (doc.attributes.number === undefined) {
throw new Error('"number" attribute should be present');
}
doc.attributes = {
...doc.attributes,
number: doc.attributes.number + 9000,
};
return doc;
},
},
});
try {
await root.start();
expect(true).toEqual(false);
} catch (err) {
const errorMessage = err.message as string;
const errorLines = errorMessage.split('\n');
const errorMessageWithoutStack = errorLines
.filter((line: string) => !line.includes(' at '))
.join('\n');
expect(errorMessageWithoutStack).toMatchInlineSnapshot(`
"Unable to complete saved object migrations for the [.kibana] index: Migrations failed. Reason: 2 transformation errors were encountered:
- foo:3: Error: Migration function for version 7.14.0 threw an error
Caused by:
Error: \\"number\\" attribute should be present
- foo:4: Error: Migration function for version 7.14.0 threw an error
Caused by:
Error: \\"number\\" attribute should be present
To allow migrations to proceed, please delete or fix these documents.
Note that you can configure Kibana to automatically discard corrupt documents and transform errors for this migration.
Please refer to ${migrationDocLink} for more information."
`);
expectMatchOrder(errorLines, [
{
mode: 'equal',
value: '- foo:3: Error: Migration function for version 7.14.0 threw an error',
},
{
mode: 'contain',
value: 'at transform',
},
{
mode: 'equal',
value: 'Caused by:',
},
{
mode: 'equal',
value: 'Error: "number" attribute should be present',
},
{
mode: 'contain',
value: 'at 7.14.0',
},
{
mode: 'equal',
value: '- foo:4: Error: Migration function for version 7.14.0 threw an error',
},
{
mode: 'contain',
value: 'at transform',
},
{
mode: 'equal',
value: 'Caused by:',
},
{
mode: 'equal',
value: 'Error: "number" attribute should be present',
},
{
mode: 'contain',
value: 'at 7.14.0',
},
]);
}
});
});
function createRoot() {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
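// small batch size so the 4 `foo` documents span multiple batches and the two
// transform errors are collected across them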
batchSize: 5,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
appenders: ['file'],
level: 'info',
},
],
},
},
{
oss: false,
}
);
}
type FindInOrderPattern = { mode: 'equal'; value: string } | { mode: 'contain'; value: string };
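// Asserts that every pattern in `patterns` matches some line of `lines`, in order:
// each pattern consumes the first subsequent line that equals or contains its value.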
const expectMatchOrder = (lines: string[], patterns: FindInOrderPattern[]) => {
let lineIdx = 0;
let patternIdx = 0;
while (lineIdx < lines.length && patternIdx < patterns.length) {
const line = lines[lineIdx];
const pattern = patterns[patternIdx];
if (lineMatch(line, pattern)) {
patternIdx++;
}
lineIdx++;
}
expect(patternIdx).toEqual(patterns.length);
};
const lineMatch = (line: string, pattern: FindInOrderPattern) => {
if (pattern.mode === 'contain') {
return line.trim().includes(pattern.value.trim());
}
return line.trim() === pattern.value.trim();
};

View file

@ -1,184 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import {
createTestServers,
createRootWithCorePlugins,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import { Root } from '@kbn/core-root-server-internal';
const logFilePath = Path.join(__dirname, 'corrupt_outdated_docs.log');
const asyncUnlink = Util.promisify(Fs.unlink);
async function removeLogFile() {
// ignore errors if it doesn't exist
await asyncUnlink(logFilePath).catch(() => void 0);
}
describe('migration v2 with corrupt saved object documents', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
beforeAll(async () => {
await removeLogFile();
});
afterAll(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
it.skip('collects corrupt saved object documents across batches', async () => {
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// original uncorrupted SO:
// {
// type: 'foo', // 'bar', 'baz'
// foo: {}, // bar: {}, baz: {}
// migrationVersion: {
// foo: '7.13.0',
// },
// },
// original corrupt SO example:
// {
// id: 'bar:123' // '123' etc
// type: 'foo',
// foo: {},
// migrationVersion: {
// foo: '7.13.0',
// },
// },
// contains migrated index with 8.0 aliases to skip migration, but run outdated doc search
dataArchive: Path.join(
__dirname,
'archives',
'8.0.0_migrated_with_corrupt_outdated_docs.zip'
),
},
},
});
root = createRoot();
esServer = await startES();
await root.preboot();
const coreSetup = await root.setup();
coreSetup.savedObjects.registerType({
name: 'foo',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
coreSetup.savedObjects.registerType({
name: 'bar',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
coreSetup.savedObjects.registerType({
name: 'baz',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
try {
await root.start();
} catch (err) {
const errorMessage = err.message;
expect(
errorMessage.startsWith(
'Unable to complete saved object migrations for the [.kibana] index: Migrations failed. Reason: 19 corrupt saved object documents were found: '
)
).toBeTruthy();
expect(
errorMessage.endsWith(
'To allow migrations to proceed, please delete or fix these documents.'
)
).toBeTruthy();
const expectedCorruptDocIds = [
'"foo:my_name"',
'"123"',
'"456"',
'"789"',
'"foo:other_name"',
'"bar:123"',
'"baz:123"',
'"bar:345"',
'"bar:890"',
'"baz:456"',
'"baz:789"',
'"bar:other_name"',
'"baz:other_name"',
'"bar:my_name"',
'"baz:my_name"',
'"foo:123"',
'"foo:456"',
'"foo:789"',
'"foo:other"',
];
for (const corruptDocId of expectedCorruptDocIds) {
expect(errorMessage.includes(corruptDocId)).toBeTruthy();
}
}
});
});
function createRoot() {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
batchSize: 5,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
appenders: ['file'],
level: 'info',
},
],
},
},
{
oss: true,
}
);
}

View file

@ -0,0 +1,290 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import { join } from 'path';
import { omit, sortBy } from 'lodash';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import type { MigrationResult } from '@kbn/core-saved-objects-base-server-internal';
import type { Client } from '@elastic/elasticsearch';
import {
clearLog,
defaultKibanaIndex,
defaultKibanaTaskIndex,
getAggregatedTypesCount,
getEsClient,
nextMinor,
startElasticsearch,
} from '../kibana_migrator_test_kit';
import {
BASELINE_COMPLEX_DOCUMENTS_500K_AFTER,
BASELINE_DOCUMENTS_PER_TYPE_500K,
BASELINE_TEST_ARCHIVE_500K,
} from '../kibana_migrator_archive_utils';
import {
getRelocatingMigratorTestKit,
kibanaSplitIndex,
} from '../kibana_migrator_test_kit.fixtures';
import { delay, parseLogFile } from '../test_utils';
import '../jest_matchers';
import { expectDocumentsMigratedToHighestVersion } from '../kibana_migrator_test_kit.expect';
const PARALLEL_MIGRATORS = 4;
type Job<T> = () => Promise<T>;
const getLogFile = (node: number) => join(__dirname, `multiple_kb_nodes_${node}.log`);
const logFileSecondRun = join(__dirname, `multiple_kb_nodes_second_run.log`);
describe('multiple Kibana nodes performing a reindexing migration', () => {
jest.setTimeout(1200000); // costly test
let esServer: TestElasticsearchUtils['es'];
let client: Client;
let results: MigrationResult[][];
beforeEach(async () => {
for (let i = 0; i < PARALLEL_MIGRATORS; ++i) {
await clearLog(getLogFile(i));
}
await clearLog(logFileSecondRun);
esServer = await startElasticsearch({ dataArchive: BASELINE_TEST_ARCHIVE_500K });
client = await getEsClient();
await checkBeforeState();
});
it.each([
{
case: 'migrate saved objects normally when started at the same time',
delaySeconds: 0,
},
{
case: 'migrate saved objects normally when started with a small interval',
delaySeconds: 1,
},
{
case: 'migrate saved objects normally when started with an average interval',
delaySeconds: 5,
},
{
case: 'migrate saved objects normally when started with a bigger interval',
delaySeconds: 20,
},
])('$case', async ({ delaySeconds }) => {
const jobs = await createMigratorJobs(PARALLEL_MIGRATORS);
results = await startWithDelay(jobs, delaySeconds);
checkMigratorsResults();
await checkIndicesInfo();
await checkSavedObjectDocuments();
await checkFirstNodeSteps();
await checkUpToDateOnRestart();
});
afterEach(async () => {
await esServer?.stop();
await delay(5); // give it a few seconds... cause we always do ¯\_(ツ)_/¯
});
async function checkBeforeState() {
await expect(getAggregatedTypesCount(client, [defaultKibanaIndex])).resolves.toEqual({
basic: BASELINE_DOCUMENTS_PER_TYPE_500K,
complex: BASELINE_DOCUMENTS_PER_TYPE_500K,
deprecated: BASELINE_DOCUMENTS_PER_TYPE_500K,
server: BASELINE_DOCUMENTS_PER_TYPE_500K,
});
await expect(getAggregatedTypesCount(client, [defaultKibanaTaskIndex])).resolves.toEqual({
task: BASELINE_DOCUMENTS_PER_TYPE_500K,
});
await expect(getAggregatedTypesCount(client, [kibanaSplitIndex])).resolves.toEqual({});
}
function checkMigratorsResults() {
const flatResults = results.flat(); // multiple nodes, multiple migrators each
// each migrator should take less than 120 seconds
const painfulMigrator = (flatResults as Array<{ elapsedMs?: number }>).find(
({ elapsedMs }) => elapsedMs && elapsedMs > 120_000
);
expect(painfulMigrator).toBeUndefined();
// each migrator has either migrated or patched
const failedMigrator = flatResults.find(
({ status }) => status !== 'migrated' && status !== 'patched'
);
expect(failedMigrator).toBeUndefined();
}
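For orientation, this is the subset of the MigrationResult shape that the checks in this suite rely on (a hedged sketch inferred from the assertions, not the full type exported by @kbn/core-saved-objects-base-server-internal):
// Inferred subset, for reference only:
// {
//   status: 'migrated' | 'patched' | ...;  // anything else trips checkMigratorsResults()
//   destIndex?: string;                    // asserted on the second run in checkUpToDateOnRestart()
//   elapsedMs?: number;                    // used to flag migrators slower than 120 seconds
// }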
async function checkIndicesInfo() {
const indicesInfo = await client.indices.get({ index: '.kibana*' });
[defaultKibanaIndex, kibanaSplitIndex].forEach((index) =>
expect(indicesInfo[`${index}_${nextMinor}_001`]).toEqual(
expect.objectContaining({
aliases: expect.objectContaining({ [index]: expect.any(Object) }),
mappings: {
dynamic: 'strict',
_meta: {
mappingVersions: expect.any(Object),
indexTypesMap: expect.any(Object),
},
properties: expect.any(Object),
},
settings: { index: expect.any(Object) },
})
)
);
const typesMap =
indicesInfo[`${defaultKibanaIndex}_${nextMinor}_001`].mappings?._meta?.indexTypesMap;
expect(typesMap[defaultKibanaIndex]).toEqual(['complex', 'server']); // 'deprecated' no longer present
expect(typesMap[kibanaSplitIndex]).toEqual(['basic', 'task']);
}
async function checkSavedObjectDocuments() {
// check documents have been migrated
await expect(getAggregatedTypesCount(client, [defaultKibanaIndex])).resolves.toEqual({
complex: BASELINE_COMPLEX_DOCUMENTS_500K_AFTER,
});
await expect(getAggregatedTypesCount(client, [defaultKibanaTaskIndex])).resolves.toEqual({});
await expect(getAggregatedTypesCount(client, [kibanaSplitIndex])).resolves.toEqual({
basic: BASELINE_DOCUMENTS_PER_TYPE_500K,
task: BASELINE_DOCUMENTS_PER_TYPE_500K,
});
await expectDocumentsMigratedToHighestVersion(client, [defaultKibanaIndex, kibanaSplitIndex]);
}
async function checkFirstNodeSteps() {
const logs = await parseLogFile(getLogFile(0));
// '.kibana_migrator_split' is a new index, all nodes' migrators must attempt to create it
expect(logs).toContainLogEntries(
[
`[${kibanaSplitIndex}] INIT -> CREATE_REINDEX_TEMP.`,
`[${kibanaSplitIndex}] CREATE_REINDEX_TEMP -> READY_TO_REINDEX_SYNC.`,
// no docs to reindex, as source index did NOT exist
`[${kibanaSplitIndex}] READY_TO_REINDEX_SYNC -> DONE_REINDEXING_SYNC.`,
],
{ ordered: true }
);
// '.kibana_migrator' and '.kibana_migrator_tasks' are involved in a relocation
[defaultKibanaIndex, defaultKibanaTaskIndex].forEach((index) => {
expect(logs).toContainLogEntries(
[
`[${index}] INIT -> WAIT_FOR_YELLOW_SOURCE.`,
`[${index}] WAIT_FOR_YELLOW_SOURCE -> CHECK_CLUSTER_ROUTING_ALLOCATION.`,
`[${index}] CHECK_CLUSTER_ROUTING_ALLOCATION -> CHECK_UNKNOWN_DOCUMENTS.`,
`[${index}] CHECK_UNKNOWN_DOCUMENTS -> SET_SOURCE_WRITE_BLOCK.`,
`[${index}] SET_SOURCE_WRITE_BLOCK -> CALCULATE_EXCLUDE_FILTERS.`,
`[${index}] CALCULATE_EXCLUDE_FILTERS -> CREATE_REINDEX_TEMP.`,
`[${index}] CREATE_REINDEX_TEMP -> READY_TO_REINDEX_SYNC.`,
`[${index}] READY_TO_REINDEX_SYNC -> REINDEX_SOURCE_TO_TEMP_OPEN_PIT.`,
`[${index}] REINDEX_SOURCE_TO_TEMP_OPEN_PIT -> REINDEX_SOURCE_TO_TEMP_READ.`,
`[${index}] REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_TRANSFORM.`,
`[${index}] REINDEX_SOURCE_TO_TEMP_TRANSFORM -> REINDEX_SOURCE_TO_TEMP_INDEX_BULK.`,
`[${index}] REINDEX_SOURCE_TO_TEMP_INDEX_BULK`,
// if the index is closed by another node, we will have instead: REINDEX_SOURCE_TO_TEMP_TRANSFORM => REINDEX_SOURCE_TO_TEMP_CLOSE_PIT.
// `[${index}] REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_CLOSE_PIT.`,
`[${index}] REINDEX_SOURCE_TO_TEMP_CLOSE_PIT -> DONE_REINDEXING_SYNC.`,
],
{ ordered: true }
);
});
// after the relocation, all migrators share the final part of the flow
[defaultKibanaIndex, defaultKibanaTaskIndex, kibanaSplitIndex].forEach((index) => {
expect(logs).toContainLogEntries(
[
`[${index}] DONE_REINDEXING_SYNC -> SET_TEMP_WRITE_BLOCK.`,
`[${index}] SET_TEMP_WRITE_BLOCK -> CLONE_TEMP_TO_TARGET.`,
`[${index}] CLONE_TEMP_TO_TARGET -> REFRESH_TARGET.`,
`[${index}] REFRESH_TARGET -> OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT.`,
`[${index}] OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT -> OUTDATED_DOCUMENTS_SEARCH_READ.`,
`[${index}] OUTDATED_DOCUMENTS_SEARCH_READ -> OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT.`,
`[${index}] OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT -> CHECK_TARGET_MAPPINGS.`,
`[${index}] CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES.`,
`[${index}] UPDATE_TARGET_MAPPINGS_PROPERTIES -> UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK.`,
`[${index}] UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK -> UPDATE_TARGET_MAPPINGS_META.`,
`[${index}] UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS.`,
`[${index}] CHECK_VERSION_INDEX_READY_ACTIONS -> MARK_VERSION_INDEX_READY_SYNC.`,
`[${index}] MARK_VERSION_INDEX_READY_SYNC`, // all migrators try to update all aliases, all but one will have conflicts
`[${index}] Migration completed after`,
],
{ ordered: true }
);
});
// should NOT retransform anything (we reindexed, thus we transformed already)
[defaultKibanaIndex, defaultKibanaTaskIndex, kibanaSplitIndex].forEach((index) => {
expect(logs).not.toContainLogEntry(`[${index}] OUTDATED_DOCUMENTS_TRANSFORM`);
expect(logs).not.toContainLogEntry(
`[${index}] Kibana is performing a compatible update and it will update the following SO types so that ES can pickup the updated mappings`
);
});
}
async function checkUpToDateOnRestart() {
// run a new migrator to ensure everything is up to date
const { runMigrations } = await getRelocatingMigratorTestKit({
logFilePath: logFileSecondRun,
// no need to filter deprecated this time, they should not be there anymore
});
const secondRunResults = await runMigrations();
expect(
sortBy(
secondRunResults.map((result) => omit(result, 'elapsedMs')),
'destIndex'
)
).toEqual([
{
destIndex: `.kibana_migrator_${nextMinor}_001`,
status: 'patched',
},
{
destIndex: `.kibana_migrator_split_${nextMinor}_001`,
status: 'patched',
},
]);
const logs = await parseLogFile(logFileSecondRun);
expect(logs).not.toContainLogEntries(['REINDEX', 'CREATE', 'UPDATE_TARGET_MAPPINGS']);
}
});
async function createMigratorJobs(nodes: number): Promise<Array<Job<MigrationResult[]>>> {
const jobs: Array<Job<MigrationResult[]>> = [];
for (let i = 0; i < nodes; ++i) {
const kit = await getRelocatingMigratorTestKit({
logFilePath: getLogFile(i),
filterDeprecated: true,
});
jobs.push(kit.runMigrations);
}
return jobs;
}
async function startWithDelay<T>(runnables: Array<Job<T>>, delayInSec: number) {
const promises: Array<Promise<T>> = [];
const errors: string[] = [];
for (let i = 0; i < runnables.length; i++) {
promises.push(
runnables[i]().catch((reason) => {
errors.push(reason.message ?? reason);
return reason;
})
);
if (i < runnables.length - 1) {
// We wait between instances, but not after the last one
await delay(delayInSec);
}
}
const results = await Promise.all(promises);
if (errors.length) {
throw new Error(`Failed to run all parallel jobs: ${errors.join(',')}`);
} else {
return results;
}
}
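One note on startWithDelay (hypothetical call below, not part of the commit): a failing job does not short-circuit the loop; each rejection is caught and its message collected, and a single aggregate error is thrown only after every job has settled.
// Hypothetical: if two of the four nodes fail, both messages surface at once.
try {
  await startWithDelay(await createMigratorJobs(4), 5);
} catch (e) {
  // e.message === 'Failed to run all parallel jobs: <first message>,<second message>'
}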

View file

@ -1,273 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import del from 'del';
import { esTestConfig, kibanaServerTestUser } from '@kbn/test';
import { kibanaPackageJson as pkg } from '@kbn/repo-info';
import type { SavedObjectsType } from '@kbn/core-saved-objects-server';
import {
createTestServers,
createRoot as createkbnTestServerRoot,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Root } from '@kbn/core-root-server-internal';
const LOG_FILE_PREFIX = 'migration_test_multiple_kibana_nodes';
async function removeLogFiles() {
await del([Path.join(__dirname, `${LOG_FILE_PREFIX}_*.log`)], { force: true });
}
function extractSortNumberFromId(id: string): number {
const parsedId = parseInt(id.split(':')[1], 10); // "foo:123" -> 123
if (isNaN(parsedId)) {
throw new Error(`Failed to parse Saved Object ID [${id}]. Result is NaN`);
}
return parsedId;
}
async function fetchDocs(esClient: ElasticsearchClient, index: string) {
const body = await esClient.search<any>({
index,
size: 10000,
body: {
query: {
bool: {
should: [
{
term: { type: 'foo' },
},
],
},
},
},
});
return body.hits.hits
.map((h) => ({
...h._source,
id: h._id,
}))
.sort((a, b) => extractSortNumberFromId(a.id) - extractSortNumberFromId(b.id));
}
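The numeric sort is what lets the assertions further down compare documents positionally; a minimal sketch of the helpers' behavior (hypothetical values):
// extractSortNumberFromId('foo:42')  -> 42
// extractSortNumberFromId('foo:abc') -> throws 'Failed to parse Saved Object ID [foo:abc]. Result is NaN'
// fetchDocs(...) thus resolves to [{ id: 'foo:0', ... }, { id: 'foo:1', ... }, ...]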
interface CreateRootConfig {
logFileName: string;
}
async function createRoot({ logFileName }: CreateRootConfig) {
const root = createkbnTestServerRoot({
elasticsearch: {
hosts: [esTestConfig.getUrl()],
username: kibanaServerTestUser.username,
password: kibanaServerTestUser.password,
},
migrations: {
skip: false,
batchSize: 100, // fixture contains 5000 docs
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFileName,
layout: {
type: 'pattern',
},
},
},
loggers: [
{
name: 'root',
appenders: ['file'],
level: 'info',
},
{
name: 'savedobjects-service',
appenders: ['file'],
level: 'debug',
},
],
},
});
await root.preboot();
return root;
}
// suite is very long, the 10-minute default can cause timeouts
jest.setTimeout(15 * 60 * 1000);
// FLAKY: https://github.com/elastic/kibana/issues/156117
describe.skip('migration v2', () => {
let esServer: TestElasticsearchUtils;
let rootA: Root;
let rootB: Root;
let rootC: Root;
const migratedIndexAlias = `.kibana_${pkg.version}`;
const fooType: SavedObjectsType = {
name: 'foo',
hidden: false,
mappings: { properties: { status: { type: 'text' } } },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => {
if (doc.attributes?.status) {
doc.attributes.status = doc.attributes.status.replace('unmigrated', 'migrated');
}
return doc;
},
},
};
const delay = (timeInMs: number) => new Promise((resolve) => setTimeout(resolve, timeInMs));
beforeEach(async () => {
await removeLogFiles();
rootA = await createRoot({
logFileName: Path.join(__dirname, `${LOG_FILE_PREFIX}_A.log`),
});
rootB = await createRoot({
logFileName: Path.join(__dirname, `${LOG_FILE_PREFIX}_B.log`),
});
rootC = await createRoot({
logFileName: Path.join(__dirname, `${LOG_FILE_PREFIX}_C.log`),
});
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// original SOs: 5k of `foo` docs with this structure:
// [
// { id: 'foo:1', type: 'foo', foo: { status: 'unmigrated' }, migrationVersion: { foo: '7.13.0' } },
// { id: 'foo:2', type: 'foo', foo: { status: 'unmigrated' }, migrationVersion: { foo: '7.13.0' } },
// { id: 'foo:3', type: 'foo', foo: { status: 'unmigrated' }, migrationVersion: { foo: '7.13.0' } },
// ];
dataArchive: Path.join(__dirname, '..', 'archives', '7.13.0_concurrent_5k_foo.zip'),
},
},
});
esServer = await startES();
});
afterEach(async () => {
try {
await Promise.all([rootA.shutdown(), rootB.shutdown(), rootC.shutdown()]);
} catch (e) {
/* trap */
}
if (esServer) {
await esServer.stop();
}
});
const startWithDelay = async (instances: Root[], delayInSec: number) => {
const promises: Array<Promise<unknown>> = [];
const errors: string[] = [];
for (let i = 0; i < instances.length; i++) {
promises.push(
instances[i].start().catch((err) => {
errors.push(err.message);
})
);
if (i < instances.length - 1) {
// We wait between instances, but not after the last one
await delay(delayInSec * 1000);
}
}
await Promise.all(promises);
if (errors.length) {
throw new Error(`Failed to start all instances: ${errors.join(',')}`);
}
};
it('migrates saved objects normally when multiple Kibana instances are started at the same time', async () => {
const setupContracts = await Promise.all([rootA.setup(), rootB.setup(), rootC.setup()]);
setupContracts.forEach((setup) => setup.savedObjects.registerType(fooType));
await startWithDelay([rootA, rootB, rootC], 0);
const esClient = esServer.es.getClient();
const migratedDocs = await fetchDocs(esClient, migratedIndexAlias);
expect(migratedDocs.length).toBe(5000);
migratedDocs.forEach((doc, i) => {
expect(doc.id).toBe(`foo:${i}`);
expect(doc.foo.status).toBe(`migrated`);
expect(doc.typeMigrationVersion).toBe('7.14.0');
});
});
it('migrates saved objects normally when multiple Kibana instances are started with a small interval', async () => {
const setupContracts = await Promise.all([rootA.setup(), rootB.setup(), rootC.setup()]);
setupContracts.forEach((setup) => setup.savedObjects.registerType(fooType));
await startWithDelay([rootA, rootB, rootC], 1);
const esClient = esServer.es.getClient();
const migratedDocs = await fetchDocs(esClient, migratedIndexAlias);
expect(migratedDocs.length).toBe(5000);
migratedDocs.forEach((doc, i) => {
expect(doc.id).toBe(`foo:${i}`);
expect(doc.foo.status).toBe(`migrated`);
expect(doc.typeMigrationVersion).toBe('7.14.0');
});
});
it('migrates saved objects normally when multiple Kibana instances are started with an average interval', async () => {
const setupContracts = await Promise.all([rootA.setup(), rootB.setup(), rootC.setup()]);
setupContracts.forEach((setup) => setup.savedObjects.registerType(fooType));
await startWithDelay([rootA, rootB, rootC], 5);
const esClient = esServer.es.getClient();
const migratedDocs = await fetchDocs(esClient, migratedIndexAlias);
expect(migratedDocs.length).toBe(5000);
migratedDocs.forEach((doc, i) => {
expect(doc.id).toBe(`foo:${i}`);
expect(doc.foo.status).toBe(`migrated`);
expect(doc.typeMigrationVersion).toBe('7.14.0');
});
});
it('migrates saved objects normally when multiple Kibana instances are started with a bigger interval', async () => {
const setupContracts = await Promise.all([rootA.setup(), rootB.setup(), rootC.setup()]);
setupContracts.forEach((setup) => setup.savedObjects.registerType(fooType));
await startWithDelay([rootA, rootB, rootC], 20);
const esClient = esServer.es.getClient();
const migratedDocs = await fetchDocs(esClient, migratedIndexAlias);
expect(migratedDocs.length).toBe(5000);
migratedDocs.forEach((doc, i) => {
expect(doc.id).toBe(`foo:${i}`);
expect(doc.foo.status).toBe(`migrated`);
expect(doc.typeMigrationVersion).toBe('7.14.0');
});
});
});

View file

@ -1,149 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import Fs from 'fs';
import Util from 'util';
import { kibanaPackageJson as pkg } from '@kbn/repo-info';
import {
createRootWithCorePlugins,
createTestServers,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { Root } from '@kbn/core-root-server-internal';
const logFilePath = Path.join(__dirname, 'outdated_docs.log');
const asyncUnlink = Util.promisify(Fs.unlink);
async function removeLogFile() {
// ignore errors if it doesn't exist
await asyncUnlink(logFilePath).catch(() => void 0);
}
describe('migration v2', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
beforeAll(async () => {
await removeLogFile();
});
afterAll(async () => {
if (root) {
await root.shutdown();
}
if (esServer) {
await esServer.stop();
}
});
it('migrates the documents to the highest version', async () => {
const migratedIndexAlias = `.kibana_${pkg.version}`;
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
license: 'basic',
// original SO:
// {
// type: 'foo',
// foo: {},
// migrationVersion: {
// foo: '7.13.0',
// },
// },
// contains migrated index with 8.0 aliases to skip migration, but run outdated doc search
dataArchive: Path.join(
__dirname,
'..',
'archives',
'8.0.0_migrated_with_outdated_docs.zip'
),
},
},
});
root = createRoot();
esServer = await startES();
await root.preboot();
const coreSetup = await root.setup();
coreSetup.savedObjects.registerType({
name: 'foo',
hidden: false,
mappings: { properties: {} },
namespaceType: 'agnostic',
migrations: {
'7.14.0': (doc) => doc,
},
});
const coreStart = await root.start();
const esClient = coreStart.elasticsearch.client.asInternalUser;
const migratedDocs = await fetchDocs(esClient, migratedIndexAlias);
expect(migratedDocs.length).toBe(1);
const [doc] = migratedDocs;
expect(doc._source.coreMigrationVersion).toBe('8.8.0');
expect(doc._source.typeMigrationVersion).toBe('7.14.0');
});
});
function createRoot() {
return createRootWithCorePlugins(
{
migrations: {
skip: false,
},
logging: {
appenders: {
file: {
type: 'file',
fileName: logFilePath,
layout: {
type: 'json',
},
},
},
loggers: [
{
name: 'root',
level: 'info',
appenders: ['file'],
},
],
},
},
{
oss: true,
}
);
}
async function fetchDocs(esClient: ElasticsearchClient, index: string) {
const body = await esClient.search<any>({
index,
body: {
query: {
bool: {
should: [
{
term: { type: 'foo' },
},
],
},
},
},
});
return body.hits.hits;
}
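For clarity, a hedged sketch of the raw hit shape this test reads (field values taken from the assertions above):
// Each hit looks roughly like:
// {
//   _id: 'foo:1',
//   _source: { type: 'foo', coreMigrationVersion: '8.8.0', typeMigrationVersion: '7.14.0', ... }
// }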

View file

@ -7,7 +7,6 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import * as Either from 'fp-ts/lib/Either';
import * as Option from 'fp-ts/lib/Option';
import { errors } from '@elastic/elasticsearch';
@ -47,13 +46,15 @@ import {
createBulkIndexOperationTuple,
checkClusterRoutingAllocationEnabled,
} from '@kbn/core-saved-objects-migration-server-internal';
import { BASELINE_TEST_ARCHIVE_1K } from '../../kibana_migrator_archive_utils';
import { defaultKibanaIndex } from '../../kibana_migrator_test_kit';
const { startES } = createTestServers({
adjustTimeout: (t: number) => jest.setTimeout(t),
settings: {
es: {
dataArchive: BASELINE_TEST_ARCHIVE_1K,
license: 'basic',
dataArchive: Path.resolve(__dirname, '../../archives/7.7.2_xpack_100k_obj.zip'),
esArgs: ['http.max_content_length=10Kb'],
},
},
@ -1091,13 +1092,13 @@ describe('migration actions', () => {
it('resolves left wait_for_task_completion_timeout when the task does not finish within the timeout', async () => {
await waitForIndexStatus({
client,
index: '.kibana_1',
index: defaultKibanaIndex,
status: 'yellow',
})();
const res = (await reindex({
client,
sourceIndex: '.kibana_1',
sourceIndex: defaultKibanaIndex,
targetIndex: 'reindex_target',
reindexScript: Option.none,
requireAlias: false,
@ -1434,7 +1435,7 @@ describe('migration actions', () => {
it('resolves left wait_for_task_completion_timeout when the task does not complete within the timeout', async () => {
const res = (await pickupUpdatedMappings(
client,
'.kibana_1',
defaultKibanaIndex,
1000
)()) as Either.Right<UpdateByQueryResponse>;
@ -2030,27 +2031,5 @@ describe('migration actions', () => {
}
`);
});
it('resolves left request_entity_too_large_exception when the payload is too large', async () => {
const newDocs = new Array(10000).fill({
_source: {
title:
'how do I create a document that is large enough to exceed the limits without typing long sentences',
},
}) as SavedObjectsRawDoc[];
const task = bulkOverwriteTransformedDocuments({
client,
index: 'existing_index_with_docs',
operations: newDocs.map((doc) => createBulkIndexOperationTuple(doc)),
});
await expect(task()).resolves.toMatchInlineSnapshot(`
Object {
"_tag": "Left",
"left": Object {
"type": "request_entity_too_large_exception",
},
}
`);
});
});
});

View file

@ -1999,8 +1999,7 @@ export const runActionTestSuite = ({
});
});
// Failing ES Promotion: https://github.com/elastic/kibana/issues/193592
describe.skip('bulkOverwriteTransformedDocuments', () => {
describe('bulkOverwriteTransformedDocuments', () => {
it('resolves right when documents do not yet exist in the index', async () => {
const newDocs = [
{ _source: { title: 'doc 5' } },
@ -2087,30 +2086,5 @@ export const runActionTestSuite = ({
}
`);
});
// no way to configure http.max_content_length on the serverless instance for now.
runOnTraditionalOnly(() => {
it('resolves left request_entity_too_large_exception when the payload is too large', async () => {
const newDocs = new Array(10000).fill({
_source: {
title:
'how do I create a document that is large enough to exceed the limits without typing long sentences',
},
}) as SavedObjectsRawDoc[];
const task = bulkOverwriteTransformedDocuments({
client,
index: 'existing_index_with_docs',
operations: newDocs.map((doc) => createBulkIndexOperationTuple(doc)),
});
await expect(task()).resolves.toMatchInlineSnapshot(`
Object {
"_tag": "Left",
"left": Object {
"type": "request_entity_too_large_exception",
},
}
`);
});
});
});
};

View file

@ -7,56 +7,45 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import {
clearLog,
startElasticsearch,
getKibanaMigratorTestKit,
nextMinor,
defaultKibanaIndex,
defaultKibanaTaskIndex,
currentVersion,
} from '../kibana_migrator_test_kit';
import '../jest_matchers';
import { delay, parseLogFile } from '../test_utils';
import { baselineTypes as types } from '../kibana_migrator_test_kit.fixtures';
export const logFilePath = Path.join(__dirname, 'fail_on_rollback.test.log');
import { delay } from '../test_utils';
import { getUpToDateMigratorTestKit } from '../kibana_migrator_test_kit.fixtures';
import { BASELINE_TEST_ARCHIVE_1K } from '../kibana_migrator_archive_utils';
describe('when rolling back to an older version', () => {
let esServer: TestElasticsearchUtils['es'];
beforeAll(async () => {
esServer = await startElasticsearch();
esServer = await startElasticsearch({ dataArchive: BASELINE_TEST_ARCHIVE_1K });
});
beforeEach(async () => {});
it('kibana should detect that a later version alias exists, and abort', async () => {
// create a current version baseline
const { runMigrations: createBaseline } = await getKibanaMigratorTestKit({
types,
logFilePath,
});
await createBaseline();
// migrate to next minor
const { runMigrations: upgrade } = await getKibanaMigratorTestKit({
kibanaVersion: nextMinor,
types,
logFilePath,
});
const { runMigrations: upgrade } = await getUpToDateMigratorTestKit();
await upgrade();
// run migrations for the current version again (simulate rollback)
const { runMigrations: rollback } = await getKibanaMigratorTestKit({ types, logFilePath });
const { runMigrations: rollback } = await getUpToDateMigratorTestKit({
kibanaVersion: currentVersion,
});
await clearLog(logFilePath);
await expect(rollback()).rejects.toThrowError(
`Unable to complete saved object migrations for the [${defaultKibanaIndex}] index: The ${defaultKibanaIndex}_${nextMinor} alias refers to a newer version of Kibana: v${nextMinor}`
);
const logs = await parseLogFile(logFilePath);
expect(logs).toContainLogEntry('[.kibana_migrator_tests] INIT -> FATAL.');
try {
await rollback();
throw new Error('Rollback should have thrown but it did not');
} catch (error) {
expect([
`Unable to complete saved object migrations for the [${defaultKibanaIndex}] index: The ${defaultKibanaIndex}_${nextMinor} alias refers to a newer version of Kibana: v${nextMinor}`,
`Unable to complete saved object migrations for the [${defaultKibanaTaskIndex}] index: The ${defaultKibanaTaskIndex}_${nextMinor} alias refers to a newer version of Kibana: v${nextMinor}`,
]).toContain(error.message);
}
});
afterAll(async () => {

View file

@ -95,7 +95,8 @@ function createRoot({ logFileName, hosts }: RootConfig) {
});
}
describe('migration v2', () => {
// Failing 9.0 version update: https://github.com/elastic/kibana/issues/192624
describe.skip('migration v2', () => {
let esServer: TestElasticsearchUtils;
let root: Root;
const migratedIndexAlias = `.kibana_${pkg.version}`;

View file

@ -7,16 +7,16 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import fs from 'fs/promises';
import { Root } from '@kbn/core-root-server-internal';
import { join } from 'path';
import type { Root } from '@kbn/core-root-server-internal';
import {
createRootWithCorePlugins,
type TestElasticsearchUtils,
} from '@kbn/core-test-helpers-kbn-server';
import { startElasticsearch } from '../kibana_migrator_test_kit';
import { clearLog, readLog, startElasticsearch } from '../kibana_migrator_test_kit';
import { delay } from '../test_utils';
const logFilePath = Path.join(__dirname, 'read_batch_size.log');
const logFilePath = join(__dirname, 'read_batch_size.log');
describe('migration v2 - read batch size', () => {
let esServer: TestElasticsearchUtils;
@ -25,14 +25,15 @@ describe('migration v2 - read batch size', () => {
beforeEach(async () => {
esServer = await startElasticsearch({
dataArchive: Path.join(__dirname, '..', 'archives', '8.4.0_with_sample_data_logs.zip'),
dataArchive: join(__dirname, '..', 'archives', '8.4.0_with_sample_data_logs.zip'),
});
await fs.unlink(logFilePath).catch(() => {});
await clearLog(logFilePath);
});
afterEach(async () => {
await root?.shutdown();
await esServer?.stop();
await delay(5); // give it a few seconds... cause we always do ¯\_(ツ)_/¯
});
it('reduces the read batchSize in half if a batch exceeds maxReadBatchSizeBytes', async () => {
@ -42,7 +43,7 @@ describe('migration v2 - read batch size', () => {
await root.start();
// Check for migration steps present in the logs
logs = await fs.readFile(logFilePath, 'utf-8');
logs = await readLog(logFilePath);
expect(logs).toMatch(
/Read a batch with a response content length of \d+ bytes which exceeds migrations\.maxReadBatchSizeBytes, retrying by reducing the batch size in half to 15/
@ -57,7 +58,7 @@ describe('migration v2 - read batch size', () => {
await root.start();
// Check for migration steps present in the logs
logs = await fs.readFile(logFilePath, 'utf-8');
logs = await readLog(logFilePath);
expect(logs).not.toMatch('retrying by reducing the batch size in half to');
expect(logs).toMatch('[.kibana] Migration completed');

View file

@ -7,25 +7,23 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import { join } from 'path';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import {
type ISavedObjectTypeRegistry,
type SavedObjectsType,
MAIN_SAVED_OBJECT_INDEX,
} from '@kbn/core-saved-objects-server';
import { DEFAULT_INDEX_TYPES_MAP } from '@kbn/core-saved-objects-base-server-internal';
import type { CloneIndexParams } from '@kbn/core-saved-objects-migration-server-internal/src/actions';
import {
clearLog,
startElasticsearch,
getKibanaMigratorTestKit,
getCurrentVersionTypeRegistry,
overrideTypeRegistry,
getAggregatedTypesCount,
type KibanaMigratorTestKit,
defaultKibanaTaskIndex,
defaultKibanaIndex,
} from '../kibana_migrator_test_kit';
import { BASELINE_TEST_ARCHIVE_1K } from '../kibana_migrator_archive_utils';
import {
getRelocatingMigratorTestKit,
kibanaSplitIndex,
} from '../kibana_migrator_test_kit.fixtures';
import { delay } from '../test_utils';
import '../jest_matchers';
@ -38,41 +36,24 @@ jest.mock('@kbn/core-saved-objects-migration-server-internal/src/actions/clone_i
...realModule,
cloneIndex: (params: CloneIndexParams) => async () => {
// we need to slow down the clone operation for indices other than
// .kibana so that .kibana can completely finish the migration before we
// .kibana_migrator so that .kibana_migrator can completely finish the migration before we
// fail
if (params.target.includes('slow_clone'))
await new Promise((resolve) => setTimeout(resolve, 1000));
if (!params.target.includes('tasks') && !params.target.includes('new'))
await new Promise((resolve) => setTimeout(resolve, 2000));
return realModule.cloneIndex(params)();
},
};
});
// define a type => index distribution
const RELOCATE_TYPES: Record<string, string> = {
dashboard: '.kibana_slow_clone_1',
visualization: '.kibana_slow_clone_1',
'canvas-workpad': '.kibana_slow_clone_1',
search: '.kibana_slow_clone_2',
task: '.kibana_task_manager_new', // force reindex
'epm-packages-assets': '.kibana_slow_clone_1',
// the remaining types will be forced to go to '.kibana',
// overriding `indexPattern: foo` defined in the registry
};
export const logFilePath = Path.join(__dirname, 'split_failed_to_clone.test.log');
export const logFilePath = join(__dirname, 'split_failed_to_clone.test.log');
describe('when splitting .kibana into multiple indices and one clone fails', () => {
let esServer: TestElasticsearchUtils['es'];
let typeRegistry: ISavedObjectTypeRegistry;
let migratorTestKitFactory: () => Promise<KibanaMigratorTestKit>;
beforeAll(async () => {
typeRegistry = await getCurrentVersionTypeRegistry({ oss: false });
await clearLog(logFilePath);
esServer = await startElasticsearch({
dataArchive: Path.join(__dirname, '..', 'archives', '7.14.0_xpack_sample_saved_objects.zip'),
timeout: 60000,
});
esServer = await startElasticsearch({ dataArchive: BASELINE_TEST_ARCHIVE_1K });
});
afterAll(async () => {
@ -81,59 +62,33 @@ describe('when splitting .kibana into multiple indices and one clone fails', ()
});
it('after resolving the problem and retrying the migration completes successfully', async () => {
const updatedTypeRegistry = overrideTypeRegistry(
typeRegistry,
(type: SavedObjectsType<any>) => {
return {
...type,
indexPattern: RELOCATE_TYPES[type.name] ?? MAIN_SAVED_OBJECT_INDEX,
};
}
);
migratorTestKitFactory = () =>
getKibanaMigratorTestKit({
types: updatedTypeRegistry.getAllTypes(),
kibanaIndex: '.kibana',
getRelocatingMigratorTestKit({
logFilePath,
defaultIndexTypesMap: DEFAULT_INDEX_TYPES_MAP,
filterDeprecated: true,
relocateTypes: {
// move 'basic' to a new index
basic: kibanaSplitIndex,
},
});
const { runMigrations: runMigrationsWhichFailsWhenCloning, client } =
await migratorTestKitFactory();
// count of types in the legacy index
expect(await getAggregatedTypesCount(client, '.kibana')).toEqual({
'apm-telemetry': 1,
application_usage_daily: 4,
'canvas-workpad': 3,
'canvas-workpad-template': 5,
config: 1,
'core-usage-stats': 1,
dashboard: 19,
'epm-packages': 3,
'epm-packages-assets': 293,
event_loop_delays_daily: 1,
'graph-workspace': 3,
'index-pattern': 5,
'ingest-agent-policies': 2,
'ingest-outputs': 1,
'ingest-package-policies': 2,
ingest_manager_settings: 1,
map: 3,
'osquery-usage-metric': 1,
'sample-data-telemetry': 3,
search: 14,
space: 1,
'spaces-usage-stats': 1,
telemetry: 1,
'ui-metric': 5,
'usage-counters': 4,
visualization: 173,
// ensure we have a valid 'before' state
expect(await getAggregatedTypesCount(client, defaultKibanaIndex)).toEqual({
basic: 200,
complex: 200,
deprecated: 200,
server: 200,
});
expect(await getAggregatedTypesCount(client, defaultKibanaTaskIndex)).toEqual({
task: 200,
});
expect(await getAggregatedTypesCount(client, kibanaSplitIndex)).toEqual({});
// cause a failure when cloning .kibana_slow_clone_* indices
await client.cluster.putSettings({ persistent: { 'cluster.max_shards_per_node': 15 } });
await client.cluster.putSettings({ persistent: { 'cluster.max_shards_per_node': 6 } });
await expect(runMigrationsWhichFailsWhenCloning()).rejects.toThrowError(
/cluster_shard_limit_exceeded/
@ -145,43 +100,16 @@ describe('when splitting .kibana into multiple indices and one clone fails', ()
const { runMigrations: runMigrations2ndTime } = await migratorTestKitFactory();
await runMigrations2ndTime();
expect(await getAggregatedTypesCount(client, '.kibana')).toMatchInlineSnapshot(`
Object {
"apm-telemetry": 1,
"application_usage_daily": 4,
"canvas-workpad-template": 5,
"config": 1,
"core-usage-stats": 1,
"epm-packages": 3,
"event_loop_delays_daily": 1,
"graph-workspace": 3,
"index-pattern": 5,
"ingest-agent-policies": 2,
"ingest-outputs": 1,
"ingest-package-policies": 2,
"ingest_manager_settings": 1,
"map": 3,
"sample-data-telemetry": 3,
"space": 1,
"spaces-usage-stats": 1,
"telemetry": 1,
"ui-metric": 5,
"usage-counters": 4,
}
`);
expect(await getAggregatedTypesCount(client, '.kibana_slow_clone_1')).toMatchInlineSnapshot(`
Object {
"canvas-workpad": 3,
"dashboard": 19,
"epm-packages-assets": 293,
"visualization": 173,
}
`);
expect(await getAggregatedTypesCount(client, '.kibana_slow_clone_2')).toMatchInlineSnapshot(`
Object {
"search": 14,
}
`);
// ensure we have a valid 'after' state
expect(await getAggregatedTypesCount(client, defaultKibanaIndex)).toEqual({
complex: 99,
});
expect(await getAggregatedTypesCount(client, defaultKibanaTaskIndex)).toEqual({
task: 200,
});
expect(await getAggregatedTypesCount(client, kibanaSplitIndex)).toEqual({
basic: 200,
});
// If we run a third time, we should not get any errors
const { runMigrations: runMigrations3rdTime } = await migratorTestKitFactory();

View file

@ -1,83 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import type { SavedObjectsBulkCreateObject } from '@kbn/core-saved-objects-api-server';
import type { SavedObjectsType } from '@kbn/core-saved-objects-server';
const defaultType: SavedObjectsType<any> = {
name: 'defaultType',
hidden: false,
namespaceType: 'agnostic',
mappings: {
properties: {
name: { type: 'keyword' },
},
},
migrations: {},
};
export const baselineTypes: Array<SavedObjectsType<any>> = [
{
...defaultType,
name: 'server',
},
{
...defaultType,
name: 'basic',
},
{
...defaultType,
name: 'deprecated',
},
{
...defaultType,
name: 'complex',
mappings: {
properties: {
name: { type: 'text' },
value: { type: 'integer' },
},
},
excludeOnUpgrade: () => {
return {
bool: {
must: [{ term: { type: 'complex' } }, { range: { 'complex.value': { lte: 1 } } }],
},
};
},
},
];
export const baselineDocuments: SavedObjectsBulkCreateObject[] = [
...['server-foo', 'server-bar', 'server-baz'].map((name) => ({
type: 'server',
attributes: {
name,
},
})),
...['basic-foo', 'basic-bar', 'basic-baz'].map((name) => ({
type: 'basic',
attributes: {
name,
},
})),
...['deprecated-foo', 'deprecated-bar', 'deprecated-baz'].map((name) => ({
type: 'deprecated',
attributes: {
name,
},
})),
...['complex-foo', 'complex-bar', 'complex-baz', 'complex-lipsum'].map((name, index) => ({
type: 'complex',
attributes: {
name,
value: index,
},
})),
];

View file

@ -14,16 +14,19 @@ import {
readLog,
clearLog,
nextMinor,
createBaseline,
currentVersion,
defaultKibanaIndex,
startElasticsearch,
getCompatibleMappingsMigrator,
getIdenticalMappingsMigrator,
getIncompatibleMappingsMigrator,
getNonDeprecatedMappingsMigrator,
getAggregatedTypesCount,
} from '../kibana_migrator_test_kit';
import {
createBaseline,
getCompatibleMigratorTestKit,
getUpToDateMigratorTestKit,
getReindexingMigratorTestKit,
} from '../kibana_migrator_test_kit.fixtures';
describe('when upgrading to a new stack version', () => {
let esServer: TestElasticsearchUtils['es'];
let esClient: ElasticsearchClient;
@ -41,11 +44,12 @@ describe('when upgrading to a new stack version', () => {
let indexContents: SearchResponse<{ type: string }, Record<string, AggregationsAggregate>>;
beforeAll(async () => {
esClient = await createBaseline();
esClient = await createBaseline({ documentsPerType: 10 });
await clearLog();
// remove the 'deprecated' type from the mappings, so that it is considered unknown
const { client, runMigrations } = await getNonDeprecatedMappingsMigrator({
const { client, runMigrations } = await getUpToDateMigratorTestKit({
filterDeprecated: true,
settings: {
migrations: {
discardUnknownObjects: nextMinor,
@ -88,7 +92,7 @@ describe('when upgrading to a new stack version', () => {
describe('CLEANUP_UNKNOWN_AND_EXCLUDED', () => {
it('preserves documents with known types', async () => {
expect(countResultsByType(indexContents, 'basic')).toEqual(3);
expect(countResultsByType(indexContents, 'basic')).toEqual(10);
});
it('deletes documents with unknown types', async () => {
@ -104,24 +108,19 @@ describe('when upgrading to a new stack version', () => {
(result) => result._source?.type === 'complex'
);
expect(complexDocuments.length).toEqual(2);
expect(complexDocuments[0]._source).toEqual(
expect.objectContaining({
complex: {
name: 'complex-baz',
value: 2,
},
type: 'complex',
})
);
expect(complexDocuments[1]._source).toEqual(
expect.objectContaining({
complex: {
name: 'complex-lipsum',
value: 3,
},
type: 'complex',
})
expect(complexDocuments.length).toEqual(5);
complexDocuments.forEach(({ _source }, value) =>
expect(_source).toEqual(
expect.objectContaining({
complex: {
name: `complex-${value}`,
firstHalf: true,
value,
},
type: 'complex',
})
)
);
});
});
@ -129,7 +128,7 @@ describe('when upgrading to a new stack version', () => {
describe('and discardUnknownObjects = false', () => {
beforeAll(async () => {
esClient = await createBaseline();
esClient = await createBaseline({ documentsPerType: 10 });
});
afterAll(async () => {
await esClient?.indices.delete({ index: `${defaultKibanaIndex}_${currentVersion}_001` });
@ -138,32 +137,8 @@ describe('when upgrading to a new stack version', () => {
await clearLog();
});
it('fails if unknown documents exist', async () => {
// remove the 'deprecated' type from the mappings, so that it is considered unknown
const { runMigrations } = await getNonDeprecatedMappingsMigrator();
try {
await runMigrations();
} catch (err) {
const errorMessage = err.message;
expect(errorMessage).toMatch(
'Unable to complete saved object migrations for the [.kibana_migrator_tests] index: Migration failed because some documents were found which use unknown saved object types:'
);
expect(errorMessage).toMatch(
'To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.'
);
expect(errorMessage).toMatch(/deprecated:.*\(type: "deprecated"\)/);
}
const logs = await readLog();
expect(logs).toMatch('INIT -> WAIT_FOR_YELLOW_SOURCE.');
expect(logs).toMatch('WAIT_FOR_YELLOW_SOURCE -> UPDATE_SOURCE_MAPPINGS_PROPERTIES.');
expect(logs).toMatch('UPDATE_SOURCE_MAPPINGS_PROPERTIES -> CLEANUP_UNKNOWN_AND_EXCLUDED.');
expect(logs).toMatch('CLEANUP_UNKNOWN_AND_EXCLUDED -> FATAL.');
});
it('proceeds if there are no unknown documents', async () => {
const { client, runMigrations } = await getIdenticalMappingsMigrator();
const { client, runMigrations } = await getUpToDateMigratorTestKit();
await runMigrations();
@ -183,7 +158,7 @@ describe('when upgrading to a new stack version', () => {
expect(logs).toMatch('CHECK_VERSION_INDEX_READY_ACTIONS -> DONE.');
const indexContents = await client.search({ index: defaultKibanaIndex, size: 100 });
expect(indexContents.hits.hits.length).toEqual(8);
expect(indexContents.hits.hits.length).toEqual(25);
});
});
});
@ -193,10 +168,10 @@ describe('when upgrading to a new stack version', () => {
let indexContents: SearchResponse<{ type: string }, Record<string, AggregationsAggregate>>;
beforeAll(async () => {
esClient = await createBaseline();
esClient = await createBaseline({ documentsPerType: 10 });
await clearLog();
const { client, runMigrations } = await getCompatibleMappingsMigrator({
const { client, runMigrations } = await getCompatibleMigratorTestKit({
filterDeprecated: true, // remove the 'deprecated' type from the mappings, so that it is considered unknown
settings: {
migrations: {
@ -243,7 +218,7 @@ describe('when upgrading to a new stack version', () => {
describe('CLEANUP_UNKNOWN_AND_EXCLUDED', () => {
it('preserves documents with known types', async () => {
expect(countResultsByType(indexContents, 'basic')).toEqual(3);
expect(countResultsByType(indexContents, 'basic')).toEqual(10);
});
it('deletes documents with unknown types', async () => {
@ -259,24 +234,19 @@ describe('when upgrading to a new stack version', () => {
(result) => result._source?.type === 'complex'
);
expect(complexDocuments.length).toEqual(2);
expect(complexDocuments[0]._source).toEqual(
expect.objectContaining({
complex: {
name: 'complex-baz',
value: 2,
},
type: 'complex',
})
);
expect(complexDocuments[1]._source).toEqual(
expect.objectContaining({
complex: {
name: 'complex-lipsum',
value: 3,
},
type: 'complex',
})
expect(complexDocuments.length).toEqual(5);
complexDocuments.forEach(({ _source }, value) =>
expect(_source).toEqual(
expect.objectContaining({
complex: {
name: `complex-${value}`,
firstHalf: true,
value,
},
type: 'complex',
})
)
);
});
});
@ -284,7 +254,7 @@ describe('when upgrading to a new stack version', () => {
describe('and discardUnknownObjects = false', () => {
beforeAll(async () => {
esClient = await createBaseline();
esClient = await createBaseline({ documentsPerType: 10 });
});
afterAll(async () => {
await esClient?.indices.delete({ index: `${defaultKibanaIndex}_${currentVersion}_001` });
@ -293,33 +263,8 @@ describe('when upgrading to a new stack version', () => {
await clearLog();
});
it('fails if unknown documents exist', async () => {
const { runMigrations } = await getCompatibleMappingsMigrator({
filterDeprecated: true, // remove the 'deprecated' type from the mappings, so that it is considered unknown
});
try {
await runMigrations();
} catch (err) {
const errorMessage = err.message;
expect(errorMessage).toMatch(
'Unable to complete saved object migrations for the [.kibana_migrator_tests] index: Migration failed because some documents were found which use unknown saved object types:'
);
expect(errorMessage).toMatch(
'To proceed with the migration you can configure Kibana to discard unknown saved objects for this migration.'
);
expect(errorMessage).toMatch(/deprecated:.*\(type: "deprecated"\)/);
}
const logs = await readLog();
expect(logs).toMatch('INIT -> WAIT_FOR_YELLOW_SOURCE.');
expect(logs).toMatch('WAIT_FOR_YELLOW_SOURCE -> UPDATE_SOURCE_MAPPINGS_PROPERTIES.'); // this step is run only if mappings are compatible but NOT equal
expect(logs).toMatch('UPDATE_SOURCE_MAPPINGS_PROPERTIES -> CLEANUP_UNKNOWN_AND_EXCLUDED.');
expect(logs).toMatch('CLEANUP_UNKNOWN_AND_EXCLUDED -> FATAL.');
});
it('proceeds if there are no unknown documents', async () => {
const { client, runMigrations } = await getCompatibleMappingsMigrator();
const { client, runMigrations } = await getCompatibleMigratorTestKit();
await runMigrations();
@ -341,14 +286,14 @@ describe('when upgrading to a new stack version', () => {
const indexContents = await client.search({ index: defaultKibanaIndex, size: 100 });
expect(indexContents.hits.hits.length).toEqual(8);
expect(indexContents.hits.hits.length).toEqual(25);
});
});
});
describe('if the mappings do NOT match (diffMappings() === true) and they are NOT compatible', () => {
beforeAll(async () => {
esClient = await createBaseline();
esClient = await createBaseline({ documentsPerType: 10 });
});
afterAll(async () => {
await esClient?.indices.delete({ index: `${defaultKibanaIndex}_${currentVersion}_001` });
@ -358,7 +303,7 @@ describe('when upgrading to a new stack version', () => {
});
it('the migrator does not skip reindexing', async () => {
const { client, runMigrations } = await getIncompatibleMappingsMigrator();
const { client, runMigrations } = await getReindexingMigratorTestKit();
await runMigrations();
@ -375,15 +320,16 @@ describe('when upgrading to a new stack version', () => {
expect(logs).toMatch('CHECK_VERSION_INDEX_READY_ACTIONS -> MARK_VERSION_INDEX_READY.');
expect(logs).toMatch('MARK_VERSION_INDEX_READY -> DONE');
const indexContents: SearchResponse<
{ type: string },
Record<string, AggregationsAggregate>
> = await client.search({ index: defaultKibanaIndex, size: 100 });
expect(indexContents.hits.hits.length).toEqual(8); // we're removing a couple of 'complex' (value <= 1)
// double-check that the deprecated documents have not been deleted
expect(countResultsByType(indexContents, 'deprecated')).toEqual(3);
const counts = await getAggregatedTypesCount(client);
// for 'complex' objects, we discard the second half and also multiples of 100
expect(counts).toMatchInlineSnapshot(`
Object {
"basic": 10,
"complex": 4,
"deprecated": 10,
"task": 10,
}
`);
});
});
});

View file

@ -7,114 +7,64 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import fs from 'fs/promises';
import { SemVer } from 'semver';
import { Env } from '@kbn/config';
import { getEnvOptions } from '@kbn/config-mocks';
import { REPO_ROOT } from '@kbn/repo-info';
import { join } from 'path';
import { readFile, unlink } from 'fs/promises';
import { type TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import type { SavedObjectsBulkCreateObject } from '@kbn/core-saved-objects-api-server';
import { DEFAULT_INDEX_TYPES_MAP } from '@kbn/core-saved-objects-base-server-internal';
import {
defaultLogFilePath,
getAggregatedTypesCount,
getEsClient,
getKibanaMigratorTestKit,
nextMinor,
startElasticsearch,
} from '../kibana_migrator_test_kit';
import { baselineTypes } from './active_delete.fixtures';
import { createBaselineArchive } from '../kibana_migrator_archive_utils';
import { getUpToDateMigratorTestKit } from '../kibana_migrator_test_kit.fixtures';
import {
BASELINE_TEST_ARCHIVE_500K,
BASELINE_DOCUMENTS_PER_TYPE_500K,
} from '../kibana_migrator_archive_utils';
const PARALLEL_MIGRATORS = 6;
const DOCUMENTS_PER_TYPE = 250000;
const kibanaIndex = '.kibana_migrator_tests';
const currentVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
const nextMinor = new SemVer(currentVersion).inc('minor').format();
const dataArchive = Path.join(__dirname, '..', 'archives', '1m_dummy_so.zip');
jest.setTimeout(24 * 3600 * 100);
describe('multiple migrator instances running in parallel', () => {
it.skip('enable and focus this test (it.skip => fit), and run it, in order to create a baseline archive', async () => {
// generate DOCUMENTS_PER_TYPE documents of each type
const documents: SavedObjectsBulkCreateObject[] = ['server', 'basic', 'deprecated', 'complex']
.map((type) =>
new Array(DOCUMENTS_PER_TYPE).fill(true).map((_, index) => ({
type,
attributes: {
name: `${type}-${++index}`,
...(type === 'complex' && { value: index }),
},
}))
)
.flat();
await createBaselineArchive({ kibanaIndex, types: baselineTypes, documents, dataArchive });
});
describe('when upgrading to a new stack version with matching mappings', () => {
let esServer: TestElasticsearchUtils['es'];
let esClient: ElasticsearchClient;
beforeAll(async () => {
esServer = await startElasticsearch({ dataArchive });
esServer = await startElasticsearch({ dataArchive: BASELINE_TEST_ARCHIVE_500K });
esClient = await getEsClient();
await fs.unlink(defaultLogFilePath).catch(() => {});
await unlink(defaultLogFilePath).catch(() => {});
for (let i = 0; i < PARALLEL_MIGRATORS; ++i) {
await fs.unlink(Path.join(__dirname, `active_delete_instance_${i}.log`)).catch(() => {});
await unlink(join(__dirname, `active_delete_instance_${i}.log`)).catch(() => {});
}
});
it('will actively delete and successfully complete migration', async () => {
it('will actively delete and successfully complete migration', async () => {
const startTime = Date.now();
const types = baselineTypes
.filter((type) => type.name !== 'deprecated')
.map((type) => {
if (type.name !== 'complex') {
return type;
}
return {
...type,
excludeOnUpgrade: () => {
return {
bool: {
must: [
{ term: { type: 'complex' } },
{ range: { 'complex.value': { lte: 125000 } } },
],
},
};
},
};
});
const beforeCleanup = await getAggregatedTypesCount();
expect(beforeCleanup.server).toEqual(DOCUMENTS_PER_TYPE);
expect(beforeCleanup.basic).toEqual(DOCUMENTS_PER_TYPE);
expect(beforeCleanup.deprecated).toEqual(DOCUMENTS_PER_TYPE);
expect(beforeCleanup.complex).toEqual(DOCUMENTS_PER_TYPE);
const beforeCleanup = await getAggregatedTypesCount(esClient);
expect(beforeCleanup.server).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
expect(beforeCleanup.basic).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
expect(beforeCleanup.deprecated).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
expect(beforeCleanup.complex).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
expect(beforeCleanup.task).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K);
const testKits = await Promise.all(
new Array(PARALLEL_MIGRATORS)
.fill({
filterDeprecated: true,
settings: {
migrations: {
discardUnknownObjects: nextMinor,
},
},
kibanaIndex,
types,
kibanaVersion: nextMinor,
})
.map((config, index) =>
getKibanaMigratorTestKit({
getUpToDateMigratorTestKit({
...config,
logFilePath: Path.join(__dirname, `active_delete_instance_${index}.log`),
defaultIndexTypesMap: DEFAULT_INDEX_TYPES_MAP,
logFilePath: join(__dirname, `active_delete_instance_${index}.log`),
})
)
);
@ -123,10 +73,7 @@ describe('multiple migrator instances running in parallel', () => {
expect(results.flat().every((result) => result.status === 'migrated')).toEqual(true);
for (let i = 0; i < PARALLEL_MIGRATORS; ++i) {
const logs = await fs.readFile(
Path.join(__dirname, `active_delete_instance_${i}.log`),
'utf-8'
);
const logs = await readFile(join(__dirname, `active_delete_instance_${i}.log`), 'utf-8');
expect(logs).toMatch('CHECK_VERSION_INDEX_READY_ACTIONS -> DONE');
expect(logs).toMatch('Migration completed');
}
@ -136,52 +83,16 @@ describe('multiple migrator instances running in parallel', () => {
console.debug(`Migration took: ${(endTime - startTime) / 1000} seconds`);
// After cleanup
const afterCleanup = await getAggregatedTypesCount();
const afterCleanup = await getAggregatedTypesCount(testKits[0].client);
expect(afterCleanup.server).not.toBeDefined(); // 'server' is part of the REMOVED_TYPES
expect(afterCleanup.basic).toEqual(DOCUMENTS_PER_TYPE); // we keep 'basic' SOs
expect(afterCleanup.basic).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K); // we keep 'basic' SOs
expect(afterCleanup.deprecated).not.toBeDefined(); // 'deprecated' is no longer present in nextMinor's mappings
expect(afterCleanup.complex).toEqual(DOCUMENTS_PER_TYPE / 2); // we excludeFromUpgrade half of them with a hook
expect(afterCleanup.complex).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K / 2); // we excludeFromUpgrade half of them with a hook
expect(afterCleanup.task).toEqual(BASELINE_DOCUMENTS_PER_TYPE_500K); // 'task' SO are on a dedicated index
});
afterAll(async () => {
// await esClient?.indices.delete({ index: `${kibanaIndex}_${currentVersion}_001` });
await esServer?.stop();
});
const getAggregatedTypesCount = async () => {
await esClient.indices.refresh();
const response = await esClient.search<unknown, { typesAggregation: { buckets: any[] } }>({
index: kibanaIndex,
_source: false,
aggs: {
typesAggregation: {
terms: {
// assign type __UNKNOWN__ to those documents that don't define one
missing: '__UNKNOWN__',
field: 'type',
size: 10,
},
aggs: {
docs: {
top_hits: {
size: 2,
_source: {
excludes: ['*'],
},
},
},
},
},
},
});
return (response.aggregations!.typesAggregation.buckets as unknown as any).reduce(
(acc: any, current: any) => {
acc[current.key] = current.doc_count;
return acc;
},
{}
);
};
});
});

View file

@ -1,399 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import {
type ISavedObjectTypeRegistry,
type SavedObjectsType,
MAIN_SAVED_OBJECT_INDEX,
ALL_SAVED_OBJECT_INDICES,
} from '@kbn/core-saved-objects-server';
import { DEFAULT_INDEX_TYPES_MAP } from '@kbn/core-saved-objects-base-server-internal';
import {
clearLog,
startElasticsearch,
getKibanaMigratorTestKit,
getCurrentVersionTypeRegistry,
overrideTypeRegistry,
getAggregatedTypesCount,
currentVersion,
type KibanaMigratorTestKit,
getEsClient,
getAggregatedTypesCountAllIndices,
} from '../kibana_migrator_test_kit';
import { delay, parseLogFile } from '../test_utils';
import '../jest_matchers';
// define a type => index distribution
const RELOCATE_TYPES: Record<string, string> = {
dashboard: '.kibana_so_ui',
visualization: '.kibana_so_ui',
'canvas-workpad': '.kibana_so_ui',
search: '.kibana_so_search',
task: '.kibana_task_manager',
// the remaining types will be forced to go to '.kibana',
// overriding `indexPattern: foo` defined in the registry
};
const PARALLEL_MIGRATORS = 6;
export const logFilePathFirstRun = Path.join(__dirname, 'dot_kibana_split_1st_run.test.log');
export const logFilePathSecondRun = Path.join(__dirname, 'dot_kibana_split_2nd_run.test.log');
describe('split .kibana index into multiple system indices', () => {
let esServer: TestElasticsearchUtils['es'];
let typeRegistry: ISavedObjectTypeRegistry;
beforeAll(async () => {
typeRegistry = await getCurrentVersionTypeRegistry({ oss: false });
});
beforeEach(async () => {
await clearLog(logFilePathFirstRun);
await clearLog(logFilePathSecondRun);
});
describe('when migrating from a legacy version', () => {
let migratorTestKitFactory: (logFilePath: string) => Promise<KibanaMigratorTestKit>;
beforeAll(async () => {
esServer = await startElasticsearch({
dataArchive: Path.join(__dirname, '..', 'archives', '7.3.0_xpack_sample_saved_objects.zip'),
timeout: 60000,
});
});
it('performs v1 migration and then relocates saved objects into different indices, depending on their types', async () => {
const updatedTypeRegistry = overrideTypeRegistry(
typeRegistry,
(type: SavedObjectsType<any>) => {
return {
...type,
indexPattern: RELOCATE_TYPES[type.name] ?? MAIN_SAVED_OBJECT_INDEX,
};
}
);
migratorTestKitFactory = (logFilePath: string) =>
getKibanaMigratorTestKit({
types: updatedTypeRegistry.getAllTypes(),
kibanaIndex: '.kibana',
logFilePath,
defaultIndexTypesMap: DEFAULT_INDEX_TYPES_MAP,
});
const { runMigrations, client } = await migratorTestKitFactory(logFilePathFirstRun);
// count of types in the legacy index
expect(await getAggregatedTypesCount(client, '.kibana_1')).toEqual({
'canvas-workpad': 3,
config: 1,
dashboard: 3,
'index-pattern': 3,
map: 3,
'maps-telemetry': 1,
'sample-data-telemetry': 3,
search: 2,
telemetry: 1,
space: 1,
visualization: 39,
});
await runMigrations();
await client.indices.refresh({
index: ['.kibana', '.kibana_so_search', '.kibana_so_ui'],
});
expect(await getAggregatedTypesCount(client, '.kibana')).toEqual({
'index-pattern': 3,
map: 3,
'sample-data-telemetry': 3,
config: 1,
telemetry: 1,
space: 1,
});
expect(await getAggregatedTypesCount(client, '.kibana_so_search')).toEqual({
search: 2,
});
expect(await getAggregatedTypesCount(client, '.kibana_so_ui')).toEqual({
visualization: 39,
'canvas-workpad': 3,
dashboard: 3,
});
const indicesInfo = await client.indices.get({ index: '.kibana*' });
expect(indicesInfo[`.kibana_${currentVersion}_001`]).toEqual(
expect.objectContaining({
aliases: expect.objectContaining({ '.kibana': expect.any(Object) }),
mappings: {
dynamic: 'strict',
_meta: {
mappingVersions: expect.any(Object),
indexTypesMap: expect.any(Object),
},
properties: expect.any(Object),
},
settings: { index: expect.any(Object) },
})
);
expect(indicesInfo[`.kibana_so_search_${currentVersion}_001`]).toEqual(
expect.objectContaining({
aliases: expect.objectContaining({ '.kibana_so_search': expect.any(Object) }),
mappings: {
dynamic: 'strict',
_meta: {
mappingVersions: expect.any(Object),
indexTypesMap: expect.any(Object),
},
properties: expect.any(Object),
},
settings: { index: expect.any(Object) },
})
);
expect(indicesInfo[`.kibana_so_ui_${currentVersion}_001`]).toEqual(
expect.objectContaining({
aliases: expect.objectContaining({ '.kibana_so_ui': expect.any(Object) }),
mappings: {
dynamic: 'strict',
_meta: {
mappingVersions: expect.any(Object),
indexTypesMap: expect.any(Object),
},
properties: expect.any(Object),
},
settings: { index: expect.any(Object) },
})
);
const typesMap = indicesInfo[`.kibana_${currentVersion}_001`].mappings?._meta?.indexTypesMap;
expect(Array.isArray(typesMap['.kibana'])).toEqual(true);
expect(typesMap['.kibana'].length > 50).toEqual(true);
expect(typesMap['.kibana'].includes('action')).toEqual(true);
expect(typesMap['.kibana'].includes('cases')).toEqual(true);
expect(typesMap['.kibana_so_search']).toEqual(['search']);
expect(typesMap['.kibana_so_ui']).toEqual(['canvas-workpad', 'dashboard', 'visualization']);
expect(typesMap['.kibana_task_manager']).toEqual(['task']);
const logs = await parseLogFile(logFilePathFirstRun);
expect(logs).toContainLogEntries(
[
// .kibana_task_manager index exists and has no aliases => LEGACY_* migration path
'[.kibana_task_manager] INIT -> LEGACY_CHECK_CLUSTER_ROUTING_ALLOCATION.',
'[.kibana_task_manager] LEGACY_CHECK_CLUSTER_ROUTING_ALLOCATION -> LEGACY_SET_WRITE_BLOCK.',
'[.kibana_task_manager] LEGACY_REINDEX_WAIT_FOR_TASK -> LEGACY_DELETE.',
'[.kibana_task_manager] LEGACY_DELETE -> SET_SOURCE_WRITE_BLOCK.',
'[.kibana_task_manager] SET_SOURCE_WRITE_BLOCK -> CALCULATE_EXCLUDE_FILTERS.',
'[.kibana_task_manager] CALCULATE_EXCLUDE_FILTERS -> CREATE_REINDEX_TEMP.',
'[.kibana_task_manager] CREATE_REINDEX_TEMP -> REINDEX_SOURCE_TO_TEMP_OPEN_PIT.',
'[.kibana_task_manager] REINDEX_SOURCE_TO_TEMP_OPEN_PIT -> REINDEX_SOURCE_TO_TEMP_READ.',
'[.kibana_task_manager] REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_TRANSFORM.',
'[.kibana_task_manager] REINDEX_SOURCE_TO_TEMP_TRANSFORM -> REINDEX_SOURCE_TO_TEMP_INDEX_BULK.',
'[.kibana_task_manager] REINDEX_SOURCE_TO_TEMP_INDEX_BULK -> REINDEX_SOURCE_TO_TEMP_READ.',
'[.kibana_task_manager] REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_CLOSE_PIT.',
'[.kibana_task_manager] REINDEX_SOURCE_TO_TEMP_CLOSE_PIT -> SET_TEMP_WRITE_BLOCK.',
'[.kibana_task_manager] SET_TEMP_WRITE_BLOCK -> CLONE_TEMP_TO_TARGET.',
'[.kibana_task_manager] CLONE_TEMP_TO_TARGET -> REFRESH_TARGET.',
'[.kibana_task_manager] REFRESH_TARGET -> OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT.',
'[.kibana_task_manager] OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT -> OUTDATED_DOCUMENTS_SEARCH_READ.',
'[.kibana_task_manager] OUTDATED_DOCUMENTS_SEARCH_READ -> OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT.',
'[.kibana_task_manager] OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT -> CHECK_TARGET_MAPPINGS.',
'[.kibana_task_manager] CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES.',
'[.kibana_task_manager] UPDATE_TARGET_MAPPINGS_PROPERTIES -> UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK.',
'[.kibana_task_manager] UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK -> UPDATE_TARGET_MAPPINGS_META.',
'[.kibana_task_manager] UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS.',
'[.kibana_task_manager] CHECK_VERSION_INDEX_READY_ACTIONS -> MARK_VERSION_INDEX_READY.',
'[.kibana_task_manager] MARK_VERSION_INDEX_READY -> DONE.',
'[.kibana_task_manager] Migration completed after',
],
{ ordered: true }
);
expect(logs).not.toContainLogEntries([
// .kibana_task_manager migrator is NOT involved in relocation, must not sync with other migrators
'[.kibana_task_manager] READY_TO_REINDEX_SYNC',
'[.kibana_task_manager] DONE_REINDEXING_SYNC',
// .kibana_task_manager migrator performed a REINDEX migration, it must update ALL types
'[.kibana_task_manager] Kibana is performing a compatible update and it will update the following SO types so that ES can pickup the updated mappings',
]);
// new indices migrators did not exist, so they all have to reindex (create temp index + sync)
['.kibana_so_ui', '.kibana_so_search'].forEach((newIndex) => {
expect(logs).toContainLogEntries(
[
`[${newIndex}] INIT -> CREATE_REINDEX_TEMP.`,
`[${newIndex}] CREATE_REINDEX_TEMP -> READY_TO_REINDEX_SYNC.`,
// no docs to reindex, as source index did NOT exist
`[${newIndex}] READY_TO_REINDEX_SYNC -> DONE_REINDEXING_SYNC.`,
],
{ ordered: true }
);
});
// the .kibana migrator is involved in a relocation, it must also reindex
expect(logs).toContainLogEntries(
[
'[.kibana] INIT -> WAIT_FOR_YELLOW_SOURCE.',
'[.kibana] WAIT_FOR_YELLOW_SOURCE -> CHECK_CLUSTER_ROUTING_ALLOCATION.',
'[.kibana] CHECK_CLUSTER_ROUTING_ALLOCATION -> CHECK_UNKNOWN_DOCUMENTS.',
'[.kibana] CHECK_UNKNOWN_DOCUMENTS -> SET_SOURCE_WRITE_BLOCK.',
'[.kibana] SET_SOURCE_WRITE_BLOCK -> CALCULATE_EXCLUDE_FILTERS.',
'[.kibana] CALCULATE_EXCLUDE_FILTERS -> CREATE_REINDEX_TEMP.',
'[.kibana] CREATE_REINDEX_TEMP -> READY_TO_REINDEX_SYNC.',
'[.kibana] READY_TO_REINDEX_SYNC -> REINDEX_SOURCE_TO_TEMP_OPEN_PIT.',
'[.kibana] REINDEX_SOURCE_TO_TEMP_OPEN_PIT -> REINDEX_SOURCE_TO_TEMP_READ.',
'[.kibana] Starting to process 59 documents.',
'[.kibana] REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_TRANSFORM.',
'[.kibana] REINDEX_SOURCE_TO_TEMP_TRANSFORM -> REINDEX_SOURCE_TO_TEMP_INDEX_BULK.',
'[.kibana] REINDEX_SOURCE_TO_TEMP_INDEX_BULK -> REINDEX_SOURCE_TO_TEMP_READ.',
'[.kibana] Processed 59 documents out of 59.',
'[.kibana] REINDEX_SOURCE_TO_TEMP_READ -> REINDEX_SOURCE_TO_TEMP_CLOSE_PIT.',
'[.kibana] REINDEX_SOURCE_TO_TEMP_CLOSE_PIT -> DONE_REINDEXING_SYNC.',
],
{ ordered: true }
);
// after .kibana migrator is done relocating documents
// the 3 migrators share the final part of the flow
['.kibana', '.kibana_so_ui', '.kibana_so_search'].forEach((index) => {
expect(logs).toContainLogEntries(
[
`[${index}] DONE_REINDEXING_SYNC -> SET_TEMP_WRITE_BLOCK.`,
`[${index}] SET_TEMP_WRITE_BLOCK -> CLONE_TEMP_TO_TARGET.`,
`[${index}] CLONE_TEMP_TO_TARGET -> REFRESH_TARGET.`,
`[${index}] REFRESH_TARGET -> OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT.`,
`[${index}] OUTDATED_DOCUMENTS_SEARCH_OPEN_PIT -> OUTDATED_DOCUMENTS_SEARCH_READ.`,
`[${index}] OUTDATED_DOCUMENTS_SEARCH_READ -> OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT.`,
`[${index}] OUTDATED_DOCUMENTS_SEARCH_CLOSE_PIT -> CHECK_TARGET_MAPPINGS.`,
`[${index}] CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES.`,
`[${index}] UPDATE_TARGET_MAPPINGS_PROPERTIES -> UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK.`,
`[${index}] UPDATE_TARGET_MAPPINGS_PROPERTIES_WAIT_FOR_TASK -> UPDATE_TARGET_MAPPINGS_META.`,
`[${index}] UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS.`,
`[${index}] CHECK_VERSION_INDEX_READY_ACTIONS -> MARK_VERSION_INDEX_READY_SYNC.`,
`[${index}] MARK_VERSION_INDEX_READY_SYNC`, // all migrators try to update all aliases, all but one will have conflicts
`[${index}] Migration completed after`,
],
{ ordered: true }
);
});
// should NOT retransform anything (we reindexed, thus we transformed already)
['.kibana', '.kibana_task_manager', '.kibana_so_ui', '.kibana_so_search'].forEach((index) => {
expect(logs).not.toContainLogEntry(`[${index}] OUTDATED_DOCUMENTS_TRANSFORM`);
expect(logs).not.toContainLogEntry(
`[${index}] Kibana is performing a compatible update and it will update the following SO types so that ES can pickup the updated mappings`
);
});
});
afterEach(async () => {
// we run the migrator again to ensure that the next time state is loaded everything still works as expected
const { runMigrations } = await migratorTestKitFactory(logFilePathSecondRun);
await runMigrations();
const logs = await parseLogFile(logFilePathSecondRun);
expect(logs).not.toContainLogEntries(['REINDEX', 'CREATE', 'UPDATE_TARGET_MAPPINGS']);
});
afterAll(async () => {
await esServer?.stop();
await delay(2);
});
});
describe('when multiple Kibana migrators run in parallel', () => {
jest.setTimeout(1200000);
it('correctly migrates 7.7.2_xpack_100k_obj.zip archive', async () => {
esServer = await startElasticsearch({
dataArchive: Path.join(__dirname, '..', 'archives', '7.7.2_xpack_100k_obj.zip'),
});
const esClient = await getEsClient();
const breakdownBefore = await getAggregatedTypesCountAllIndices(esClient);
expect(breakdownBefore).toEqual({
'.kibana': {
'apm-telemetry': 1,
application_usage_transactional: 4,
config: 1,
dashboard: 52994,
'index-pattern': 1,
'maps-telemetry': 1,
search: 1,
space: 1,
'ui-metric': 5,
visualization: 53004,
},
'.kibana_task_manager': {
task: 5,
},
});
for (let i = 0; i < PARALLEL_MIGRATORS; ++i) {
await clearLog(Path.join(__dirname, `dot_kibana_split_instance_${i}.log`));
}
const testKits = await Promise.all(
new Array(PARALLEL_MIGRATORS).fill(true).map((_, index) =>
getKibanaMigratorTestKit({
settings: {
migrations: {
discardUnknownObjects: currentVersion,
discardCorruptObjects: currentVersion,
},
},
kibanaIndex: MAIN_SAVED_OBJECT_INDEX,
types: typeRegistry.getAllTypes(),
defaultIndexTypesMap: DEFAULT_INDEX_TYPES_MAP,
logFilePath: Path.join(__dirname, `dot_kibana_split_instance_${index}.log`),
})
)
);
const results = await Promise.all(testKits.map((testKit) => testKit.runMigrations()));
expect(
results
.flat()
.every((result) => result.status === 'migrated' || result.status === 'patched')
).toEqual(true);
await esClient.indices.refresh({ index: ALL_SAVED_OBJECT_INDICES });
const breakdownAfter = await getAggregatedTypesCountAllIndices(esClient);
expect(breakdownAfter).toEqual({
'.kibana': {
'apm-telemetry': 1,
config: 1,
space: 1,
'ui-metric': 5,
},
'.kibana_alerting_cases': {},
'.kibana_analytics': {
dashboard: 52994,
'index-pattern': 1,
search: 1,
visualization: 53004,
},
'.kibana_ingest': {},
'.kibana_security_solution': {},
'.kibana_task_manager': {
task: 5,
},
'.kibana_usage_counters': {},
});
});
afterEach(async () => {
await esServer?.stop();
await delay(2);
});
});
});

View file

@ -9,15 +9,14 @@
import Path from 'path';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import { clearLog, defaultKibanaIndex, startElasticsearch } from '../kibana_migrator_test_kit';
import {
clearLog,
createBaseline,
defaultKibanaIndex,
getCompatibleMappingsMigrator,
getIdenticalMappingsMigrator,
getIncompatibleMappingsMigrator,
startElasticsearch,
} from '../kibana_migrator_test_kit';
getCompatibleMigratorTestKit,
getUpToDateMigratorTestKit,
getReindexingMigratorTestKit,
} from '../kibana_migrator_test_kit.fixtures';
import '../jest_matchers';
import { delay, parseLogFile } from '../test_utils';
@ -37,22 +36,24 @@ describe('pickupUpdatedMappings', () => {
describe('when performing a reindexing migration', () => {
it('should pickup all documents from the index', async () => {
const { runMigrations } = await getIncompatibleMappingsMigrator({ logFilePath });
const { runMigrations } = await getReindexingMigratorTestKit({ logFilePath });
await runMigrations();
const logs = await parseLogFile(logFilePath);
expect(logs).not.toContainLogEntry('Documents of the following SO types will be updated');
expect(logs).not.toContainLogEntry(
'There are no changes in the mappings of any of the SO types, skipping UPDATE_TARGET_MAPPINGS steps.'
`[${defaultKibanaIndex}] Documents of the following SO types will be updated`
);
expect(logs).not.toContainLogEntry(
`[${defaultKibanaIndex}] There are no changes in the mappings of any of the SO types, skipping UPDATE_TARGET_MAPPINGS steps.`
);
});
});
describe('when performing a compatible migration', () => {
it('should pickup only the types that have been updated', async () => {
const { runMigrations } = await getCompatibleMappingsMigrator({ logFilePath });
const { runMigrations } = await getCompatibleMigratorTestKit({ logFilePath });
await runMigrations();
@ -64,7 +65,7 @@ describe('pickupUpdatedMappings', () => {
});
it('should NOT pickup any documents if only root fields have been updated', async () => {
const { runMigrations, client } = await getIdenticalMappingsMigrator({ logFilePath });
const { runMigrations, client } = await getUpToDateMigratorTestKit({ logFilePath });
// we tamper the baseline mappings to simulate some root fields changes
const baselineMappings = await client.indices.getMapping({ index: defaultKibanaIndex });

View file

@ -12,16 +12,18 @@ import { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import {
readLog,
clearLog,
createBaseline,
currentVersion,
defaultKibanaIndex,
getCompatibleMappingsMigrator,
getIdenticalMappingsMigrator,
getIncompatibleMappingsMigrator,
startElasticsearch,
KibanaMigratorTestKit,
} from '../kibana_migrator_test_kit';
import {
createBaseline,
getCompatibleMigratorTestKit,
getUpToDateMigratorTestKit,
} from '../kibana_migrator_test_kit.fixtures';
describe('when migrating to a new version', () => {
let esServer: TestElasticsearchUtils['es'];
let esClient: ElasticsearchClient;
@ -39,7 +41,7 @@ describe('when migrating to a new version', () => {
describe('and the mappings remain the same', () => {
it('the migrator skips reindexing', async () => {
// we run the migrator with the exact same baseline types
migratorTestKitFactory = () => getIdenticalMappingsMigrator();
migratorTestKitFactory = () => getUpToDateMigratorTestKit();
const testKit = await migratorTestKitFactory();
await testKit.runMigrations();
@ -68,7 +70,7 @@ describe('when migrating to a new version', () => {
describe("and the mappings' changes are still compatible", () => {
it('the migrator skips reindexing', async () => {
// we run the migrator with altered, compatible mappings
migratorTestKitFactory = () => getCompatibleMappingsMigrator();
migratorTestKitFactory = () => getCompatibleMigratorTestKit();
const testKit = await migratorTestKitFactory();
await testKit.runMigrations();
@ -94,31 +96,6 @@ describe('when migrating to a new version', () => {
});
});
describe("and the mappings' changes are NOT compatible", () => {
it('the migrator reindexes documents to a new index', async () => {
// we run the migrator with incompatible mappings
migratorTestKitFactory = () => getIncompatibleMappingsMigrator();
const testKit = await migratorTestKitFactory();
await testKit.runMigrations();
const logs = await readLog();
expect(logs).toMatch('INIT -> WAIT_FOR_YELLOW_SOURCE.');
expect(logs).toMatch('WAIT_FOR_YELLOW_SOURCE -> UPDATE_SOURCE_MAPPINGS_PROPERTIES.');
expect(logs).toMatch(
'UPDATE_SOURCE_MAPPINGS_PROPERTIES -> CHECK_CLUSTER_ROUTING_ALLOCATION.'
);
expect(logs).toMatch('CHECK_CLUSTER_ROUTING_ALLOCATION -> CHECK_UNKNOWN_DOCUMENTS.');
expect(logs).toMatch('CHECK_TARGET_MAPPINGS -> UPDATE_TARGET_MAPPINGS_PROPERTIES.');
expect(logs).toMatch('UPDATE_TARGET_MAPPINGS_META -> CHECK_VERSION_INDEX_READY_ACTIONS.');
expect(logs).toMatch('CHECK_VERSION_INDEX_READY_ACTIONS -> MARK_VERSION_INDEX_READY.');
expect(logs).toMatch('MARK_VERSION_INDEX_READY -> DONE.');
expect(logs).not.toMatch('CREATE_NEW_TARGET');
expect(logs).not.toMatch('CLEANUP_UNKNOWN_AND_EXCLUDED');
expect(logs).not.toMatch('PREPARE_COMPATIBLE_MIGRATION');
});
});
afterEach(async () => {
// we run the migrator again to ensure that the next time state is loaded everything still works as expected
const migratorTestKit = await migratorTestKitFactory();

View file

@ -7,34 +7,29 @@
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import Path from 'path';
import { join } from 'path';
import type { TestElasticsearchUtils } from '@kbn/core-test-helpers-kbn-server';
import {
type ISavedObjectTypeRegistry,
MAIN_SAVED_OBJECT_INDEX,
} from '@kbn/core-saved-objects-server';
import { DEFAULT_INDEX_TYPES_MAP } from '@kbn/core-saved-objects-base-server-internal';
import {
clearLog,
nextMinor,
startElasticsearch,
getKibanaMigratorTestKit,
getCurrentVersionTypeRegistry,
currentVersion,
defaultKibanaIndex,
defaultKibanaTaskIndex,
} from '../kibana_migrator_test_kit';
import { delay } from '../test_utils';
import '../jest_matchers';
import { getElasticsearchClientWrapperFactory } from '../elasticsearch_client_wrapper';
import { BASELINE_TEST_ARCHIVE_1K } from '../kibana_migrator_archive_utils';
import {
getRelocatingMigratorTestKit,
kibanaSplitIndex,
} from '../kibana_migrator_test_kit.fixtures';
export const logFilePathFirstRun = Path.join(__dirname, 'dot_kibana_split_1st_run.test.log');
export const logFilePathSecondRun = Path.join(__dirname, 'dot_kibana_split_2nd_run.test.log');
export const logFilePathFirstRun = join(__dirname, 'single_migrator_failures_1st_run.test.log');
export const logFilePathSecondRun = join(__dirname, 'single_migrator_failures_2nd_run.test.log');
describe('split .kibana index into multiple system indices', () => {
let esServer: TestElasticsearchUtils['es'];
let typeRegistry: ISavedObjectTypeRegistry;
beforeAll(async () => {
typeRegistry = await getCurrentVersionTypeRegistry({ oss: false });
});
beforeEach(async () => {
await clearLog(logFilePathFirstRun);
@ -56,29 +51,23 @@ describe('split .kibana index into multiple system indices', () => {
errorDelaySeconds: delaySeconds,
});
return await getKibanaMigratorTestKit({
types: typeRegistry.getAllTypes(),
kibanaIndex: MAIN_SAVED_OBJECT_INDEX,
defaultIndexTypesMap: DEFAULT_INDEX_TYPES_MAP,
logFilePath,
clientWrapperFactory,
});
return await getRelocatingMigratorTestKit({ logFilePath, clientWrapperFactory });
};
beforeEach(async () => {
esServer = await startElasticsearch({
dataArchive: Path.join(__dirname, '..', 'archives', '7.7.2_xpack_100k_obj.zip'),
dataArchive: BASELINE_TEST_ARCHIVE_1K,
});
});
describe('when the .kibana_task_manager migrator fails on the TRANSFORMED_DOCUMENTS_BULK_INDEX state, after the other ones have finished', () => {
it('is capable of completing the .kibana_task_manager migration in subsequent restart', async () => {
describe(`when the ${defaultKibanaTaskIndex} migrator fails on the TRANSFORMED_DOCUMENTS_BULK_INDEX state, after the other ones have finished`, () => {
it(`is capable of completing the ${defaultKibanaTaskIndex} migration in subsequent restart`, async () => {
const { runMigrations: firstRun } = await getFailingKibanaMigratorTestKit({
logFilePath: logFilePathFirstRun,
failOn: (methodName, methodArgs) => {
// fail on esClient.bulk({ index: '.kibana_task_manager_1' }) which supposedly causes
// the .kibana_task_manager migrator to fail on the TRANSFORMED_DOCUMENTS_BULK_INDEX state
return methodName === 'bulk' && methodArgs[0].index === '.kibana_task_manager_1';
// fail on esClient.bulk({ index: '.kibana_migrator_tasks' }) which supposedly causes
// the .kibana_migrator_tasks migrator to fail on the TRANSFORMED_DOCUMENTS_BULK_INDEX state
return methodName === 'bulk' && methodArgs[0].index.startsWith(defaultKibanaTaskIndex);
},
delaySeconds: 90, // give the other migrators enough time to finish before failing
});
@ -88,25 +77,25 @@ describe('split .kibana index into multiple system indices', () => {
throw new Error('First run should have thrown an error but it did not');
} catch (error) {
expect(error.message).toEqual(
'Unable to complete saved object migrations for the [.kibana_task_manager] index. Error: esClient.bulk() failed unexpectedly'
`Unable to complete saved object migrations for the [${defaultKibanaTaskIndex}] index. Error: esClient.bulk() failed unexpectedly`
);
}
});
});
describe('when the .kibana migrator fails on the REINDEX_SOURCE_TO_TEMP_INDEX_BULK state', () => {
describe(`when the ${defaultKibanaIndex} migrator fails on the REINDEX_SOURCE_TO_TEMP_INDEX_BULK state`, () => {
it('is capable of successfully performing the split migration in subsequent restart', async () => {
const { runMigrations: firstRun } = await getFailingKibanaMigratorTestKit({
logFilePath: logFilePathFirstRun,
failOn: (methodName, methodArgs) => {
// fail on esClient.bulk({ index: '.kibana_8.11.0_reindex_temp_alias' }) which supposedly causes
// fail on esClient.bulk({ index: '.kibana_migrator_8.11.0_reindex_temp_alias' }) which supposedly causes
// the .kibana migrator to fail on the REINDEX_SOURCE_TO_TEMP_INDEX_BULK
return (
methodName === 'bulk' &&
methodArgs[0].index === `.kibana_${currentVersion}_reindex_temp_alias`
methodArgs[0].index === `${defaultKibanaIndex}_${nextMinor}_reindex_temp_alias`
);
},
delaySeconds: 10, // give the .kibana_task_manager migrator enough time to finish before failing
delaySeconds: 10, // give the .kibana_migrator_tasks migrator enough time to finish before failing
});
try {
@ -114,23 +103,23 @@ describe('split .kibana index into multiple system indices', () => {
throw new Error('First run should have thrown an error but it did not');
} catch (error) {
expect(error.message).toEqual(
'Unable to complete saved object migrations for the [.kibana] index. Error: esClient.bulk() failed unexpectedly'
`Unable to complete saved object migrations for the [${defaultKibanaIndex}] index. Error: esClient.bulk() failed unexpectedly`
);
}
});
});
describe('when the .kibana migrator fails on the CLONE_TEMP_TO_TARGET state', () => {
describe(`when the ${defaultKibanaIndex} migrator fails on the CLONE_TEMP_TO_TARGET state`, () => {
it('is capable of successfully performing the split migration in subsequent restart', async () => {
const { runMigrations: firstRun } = await getFailingKibanaMigratorTestKit({
logFilePath: logFilePathFirstRun,
failOn: (methodName, methodArgs) => {
// fail on esClient.indices.clone({ index: '.kibana_8.11.0_reindex_temp', target: ... }) which supposedly causes
// the .kibana migrator to fail on the CLONE_TEMP_TO_TARGET
// fail on esClient.indices.clone({ index: '.kibana_migrator_8.11.0_reindex_temp', target: ... }) which supposedly causes
// the .kibana_migrator migrator to fail on the CLONE_TEMP_TO_TARGET
return (
methodName === 'indices.clone' &&
methodArgs[0].index === `.kibana_${currentVersion}_reindex_temp` &&
methodArgs[0].target === `.kibana_${currentVersion}_001`
methodArgs[0].index === `${defaultKibanaIndex}_${nextMinor}_reindex_temp` &&
methodArgs[0].target === `${defaultKibanaIndex}_${nextMinor}_001`
);
},
delaySeconds: 15, // give the other migrators enough time to finish before failing
@ -141,22 +130,22 @@ describe('split .kibana index into multiple system indices', () => {
throw new Error('First run should have thrown an error but it did not');
} catch (error) {
expect(error.message).toEqual(
'Unable to complete saved object migrations for the [.kibana] index. Error: esClient.indices.clone() failed unexpectedly'
`Unable to complete saved object migrations for the [${defaultKibanaIndex}] index. Error: esClient.indices.clone() failed unexpectedly`
);
}
});
});
describe('when the .kibana migrator fails on the UPDATE_TARGET_MAPPINGS_PROPERTIES state', () => {
describe(`when the ${defaultKibanaIndex} migrator fails on the UPDATE_TARGET_MAPPINGS_PROPERTIES state`, () => {
it('is capable of successfully performing the split migration in subsequent restart', async () => {
const { runMigrations: firstRun } = await getFailingKibanaMigratorTestKit({
logFilePath: logFilePathFirstRun,
failOn: (methodName, methodArgs) => {
// fail on esClient.updateByQuery({ index: '.kibana_8.11.0_001' }) which supposedly causes
// the .kibana migrator to fail on the UPDATE_TARGET_MAPPINGS_PROPERTIES (pickup mappings' changes)
// fail on esClient.updateByQuery({ index: '.kibana_migrator_8.11.0_001' }) which supposedly causes
// the .kibana_migrator migrator to fail on the UPDATE_TARGET_MAPPINGS_PROPERTIES (pickup mappings' changes)
return (
methodName === 'updateByQuery' &&
methodArgs[0].index === `.kibana_${currentVersion}_001`
methodArgs[0].index === `${defaultKibanaIndex}_${nextMinor}_001`
);
},
delaySeconds: 10, // give the other migrators enough time to finish before failing
@ -167,13 +156,13 @@ describe('split .kibana index into multiple system indices', () => {
throw new Error('First run should have thrown an error but it did not');
} catch (error) {
expect(error.message).toEqual(
'Unable to complete saved object migrations for the [.kibana] index. Error: esClient.updateByQuery() failed unexpectedly'
`Unable to complete saved object migrations for the [${defaultKibanaIndex}] index. Error: esClient.updateByQuery() failed unexpectedly`
);
}
});
});
describe('when the .kibana_analytics migrator fails on the CLONE_TEMP_TO_TARGET state', () => {
describe(`when the ${kibanaSplitIndex} migrator fails on the CLONE_TEMP_TO_TARGET state`, () => {
it('is capable of successfully performing the split migration in subsequent restart', async () => {
const { runMigrations: firstRun } = await getFailingKibanaMigratorTestKit({
logFilePath: logFilePathFirstRun,
@ -182,8 +171,8 @@ describe('split .kibana index into multiple system indices', () => {
// the .kibana migrator to fail on the CLONE_TEMP_TO_TARGET
return (
methodName === 'indices.clone' &&
methodArgs[0].index === `.kibana_analytics_${currentVersion}_reindex_temp` &&
methodArgs[0].target === `.kibana_analytics_${currentVersion}_001`
methodArgs[0].index === `${kibanaSplitIndex}_${nextMinor}_reindex_temp` &&
methodArgs[0].target === `${kibanaSplitIndex}_${nextMinor}_001`
);
},
delaySeconds: 15, // give the other migrators enough time to finish before failing
@ -194,13 +183,13 @@ describe('split .kibana index into multiple system indices', () => {
throw new Error('First run should have thrown an error but it did not');
} catch (error) {
expect(error.message).toEqual(
'Unable to complete saved object migrations for the [.kibana_analytics] index. Error: esClient.indices.clone() failed unexpectedly'
`Unable to complete saved object migrations for the [${kibanaSplitIndex}] index. Error: esClient.indices.clone() failed unexpectedly`
);
}
});
});
describe('when the .kibana_analytics migrator fails on the UPDATE_TARGET_MAPPINGS_PROPERTIES state', () => {
describe(`when the ${kibanaSplitIndex} migrator fails on the UPDATE_TARGET_MAPPINGS_PROPERTIES state`, () => {
it('is capable of successfully performing the split migration in subsequent restart', async () => {
const { runMigrations: firstRun } = await getFailingKibanaMigratorTestKit({
logFilePath: logFilePathFirstRun,
@ -209,7 +198,7 @@ describe('split .kibana index into multiple system indices', () => {
// the .kibana migrator to fail on the UPDATE_TARGET_MAPPINGS_PROPERTIES (pickup mappings' changes)
return (
methodName === 'updateByQuery' &&
methodArgs[0].index === `.kibana_analytics_${currentVersion}_001`
methodArgs[0].index === `${kibanaSplitIndex}_${nextMinor}_001`
);
},
delaySeconds: 10, // give the other migrators enough time to finish before failing
@ -220,19 +209,17 @@ describe('split .kibana index into multiple system indices', () => {
throw new Error('First run should have thrown an error but it did not');
} catch (error) {
expect(error.message).toEqual(
'Unable to complete saved object migrations for the [.kibana_analytics] index. Error: esClient.updateByQuery() failed unexpectedly'
`Unable to complete saved object migrations for the [${kibanaSplitIndex}] index. Error: esClient.updateByQuery() failed unexpectedly`
);
}
});
});
afterEach(async () => {
const { runMigrations: secondRun } = await getKibanaMigratorTestKit({
types: typeRegistry.getAllTypes(),
const { runMigrations: secondRun } = await getRelocatingMigratorTestKit({
logFilePath: logFilePathSecondRun,
kibanaIndex: MAIN_SAVED_OBJECT_INDEX,
defaultIndexTypesMap: DEFAULT_INDEX_TYPES_MAP,
});
const results = await secondRun();
expect(
results

View file

@ -9,37 +9,69 @@
/* eslint-disable no-console */
import Path from 'path';
import { join } from 'path';
import fs from 'fs/promises';
import { exec } from 'child_process';
import { promisify } from 'util';
const execPromise = promisify(exec);
import { SavedObjectsBulkCreateObject } from '@kbn/core-saved-objects-api-server';
import { SavedObjectsType } from '@kbn/core-saved-objects-server';
import { getKibanaMigratorTestKit, startElasticsearch } from './kibana_migrator_test_kit';
import {
defaultKibanaIndex,
getKibanaMigratorTestKit,
startElasticsearch,
} from './kibana_migrator_test_kit';
import { delay } from './test_utils';
import { baselineTypes, getBaselineDocuments } from './kibana_migrator_test_kit.fixtures';
const DEFAULT_BATCH_SIZE = 100000;
export const BASELINE_ELASTICSEARCH_VERSION = '8.16.0';
export const BASELINE_DOCUMENTS_PER_TYPE_1K = 200;
export const BASELINE_DOCUMENTS_PER_TYPE_500K = 100_000;
// we discard the second half with exclude on upgrade (firstHalf !== true)
// then we discard the multiples of 100 within the remaining half (1% of it)
export const BASELINE_COMPLEX_DOCUMENTS_500K_AFTER =
BASELINE_DOCUMENTS_PER_TYPE_500K / 2 - BASELINE_DOCUMENTS_PER_TYPE_500K / 2 / 100;
export const BASELINE_TEST_ARCHIVE_1K = join(
__dirname,
'archives',
`${BASELINE_ELASTICSEARCH_VERSION}_baseline_${
(BASELINE_DOCUMENTS_PER_TYPE_1K * baselineTypes.length) / 1000
}k_docs.zip`
);
export const BASELINE_TEST_ARCHIVE_500K = join(
__dirname,
'archives',
`${BASELINE_ELASTICSEARCH_VERSION}_baseline_${
(BASELINE_DOCUMENTS_PER_TYPE_500K * baselineTypes.length) / 1000
}k_docs.zip`
);
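For reference, a worked sketch of what these constants evaluate to, assuming the five baseline types ('basic', 'complex', 'server', 'deprecated', 'task') registered in kibana_migrator_test_kit.fixtures:
// archive names encode the total document count: documentsPerType * types / 1000
// BASELINE_TEST_ARCHIVE_1K:   200 * 5 / 1000 = 1        => '8.16.0_baseline_1k_docs.zip'
// BASELINE_TEST_ARCHIVE_500K: 100_000 * 5 / 1000 = 500  => '8.16.0_baseline_500k_docs.zip'
// BASELINE_COMPLEX_DOCUMENTS_500K_AFTER:
//   100_000 / 2 - 100_000 / 2 / 100 = 50_000 - 500 = 49_500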
const DEFAULT_BATCH_SIZE = 5000;
interface CreateBaselineArchiveParams {
kibanaIndex: string;
types: Array<SavedObjectsType<any>>;
documents: SavedObjectsBulkCreateObject[];
batchSize?: number;
esBaseFolder?: string;
dataArchive: string;
esVersion?: string;
kibanaIndex?: string;
types?: Array<SavedObjectsType<any>>;
documents?: SavedObjectsBulkCreateObject[];
batchSize?: number;
basePath?: string;
}
export const createBaselineArchive = async ({
types,
documents,
kibanaIndex,
batchSize = DEFAULT_BATCH_SIZE,
esBaseFolder = Path.join(__dirname, `target`),
dataArchive,
esVersion,
kibanaIndex = defaultKibanaIndex,
types = baselineTypes,
documents = getBaselineDocuments(),
batchSize = DEFAULT_BATCH_SIZE,
basePath = join(__dirname, `target`),
}: CreateBaselineArchiveParams) => {
const startTime = Date.now();
const esServer = await startElasticsearch({ basePath: esBaseFolder });
const esServer = await startElasticsearch({ esVersion, basePath });
const { runMigrations, savedObjectsRepository } = await getKibanaMigratorTestKit({
kibanaIndex,
@ -57,15 +89,22 @@ export const createBaselineArchive = async ({
});
}
await compressBaselineArchive(esBaseFolder, dataArchive);
// wait a bit more to make sure everything's persisted to disk
await delay(30);
await compressBaselineArchive(basePath, dataArchive);
console.log(`Archive created in: ${(Date.now() - startTime) / 1000} seconds`, dataArchive);
await delay(200);
// leave the command line enough time to finish creating + closing the ZIP file
await delay(30);
await esServer.stop();
// await fs.rm(esBaseFolder, { recursive: true });
await delay(10);
await fs.rm(basePath, { recursive: true, force: true });
};
const compressBaselineArchive = async (esFolder: string, archiveFile: string) => {
const dataFolder = Path.join(esFolder, 'es-test-cluster');
const cmd = `cd ${dataFolder} && zip -r ${archiveFile} data -x ".DS_Store" -x "__MACOSX"`;
const dataFolder = join(esFolder, 'es-test-cluster', 'data');
const cmd = `ditto -c -k --sequesterRsrc --keepParent ${dataFolder} ${archiveFile}`;
await execPromise(cmd);
};
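Note that ditto is a macOS-only utility. A hedged sketch of a portable variant, assuming a zip binary is available on other platforms (compressBaselineArchivePortable is hypothetical, not part of this PR):
const compressBaselineArchivePortable = async (esFolder: string, archiveFile: string) => {
  const dataFolder = join(esFolder, 'es-test-cluster', 'data');
  // 'ditto' preserves macOS metadata; elsewhere fall back to plain 'zip'
  const cmd =
    process.platform === 'darwin'
      ? `ditto -c -k --sequesterRsrc --keepParent ${dataFolder} ${archiveFile}`
      : `cd ${join(esFolder, 'es-test-cluster')} && zip -r ${archiveFile} data -x ".DS_Store" -x "__MACOSX"`;
  await execPromise(cmd);
};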

View file

@ -0,0 +1,49 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
import type { ElasticsearchClient } from '@kbn/core-elasticsearch-server';
import { baselineTypes } from './kibana_migrator_test_kit.fixtures';
export async function expectDocumentsMigratedToHighestVersion(
client: ElasticsearchClient,
index: string | string[]
) {
const typeMigrationVersions: Record<string, string> = {
basic: '10.1.0', // did not define any model versions
server: '10.1.0', // did not define any model versions
deprecated: '10.1.0', // did not define any model versions
complex: '10.2.0',
task: '10.2.0',
};
const resultSets = await Promise.all(
baselineTypes.map(({ name: type }) =>
client.search<any>({
index,
query: {
bool: {
should: [
{
term: { type },
},
],
},
},
})
)
);
const notUpgraded = resultSets
.flatMap((result) => result.hits.hits)
.find(
(document) =>
document._source.typeMigrationVersion !== typeMigrationVersions[document._source.type]
);
expect(notUpgraded).toBeUndefined();
}
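A minimal usage sketch, assuming a baseline created through the fixtures (helper names as used elsewhere in this PR):
const client = await createBaseline();
const { runMigrations } = await getUpToDateMigratorTestKit();
await runMigrations();
// every baseline document should now carry its type's highest typeMigrationVersion
await expectDocumentsMigratedToHighestVersion(client, [defaultKibanaIndex, defaultKibanaTaskIndex]);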

View file

@ -9,6 +9,21 @@
import type { SavedObjectsBulkCreateObject } from '@kbn/core-saved-objects-api-server';
import type { SavedObjectsType } from '@kbn/core-saved-objects-server';
import type { IndexTypesMap } from '@kbn/core-saved-objects-base-server-internal';
import type { ElasticsearchClientWrapperFactory } from './elasticsearch_client_wrapper';
import {
currentVersion,
defaultKibanaIndex,
defaultKibanaTaskIndex,
defaultLogFilePath,
getKibanaMigratorTestKit,
nextMinor,
} from './kibana_migrator_test_kit';
export const baselineIndexTypesMap: IndexTypesMap = {
[defaultKibanaIndex]: ['basic', 'complex', 'server', 'deprecated'],
[defaultKibanaTaskIndex]: ['task'],
};
const defaultType: SavedObjectsType<any> = {
name: 'defaultType',
@ -41,6 +56,11 @@ export const baselineTypes: Array<SavedObjectsType<any>> = [
...defaultType,
name: 'deprecated',
},
{
...defaultType,
name: 'task',
indexPattern: `${defaultKibanaIndex}_tasks`,
},
{
...defaultType,
name: 'complex',
@ -48,42 +68,293 @@ export const baselineTypes: Array<SavedObjectsType<any>> = [
properties: {
name: { type: 'text' },
value: { type: 'integer' },
firstHalf: { type: 'boolean' },
},
},
excludeOnUpgrade: () => {
return {
bool: {
must: [{ term: { type: 'complex' } }, { range: { 'complex.value': { lte: 1 } } }],
must: [{ term: { type: 'complex' } }, { term: { 'complex.firstHalf': false } }],
},
};
},
},
];
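For context (not part of the diff): during an upgrade the migrator applies the bool query returned by excludeOnUpgrade as an exclusion filter (the CALCULATE_EXCLUDE_FILTERS / CLEANUP_UNKNOWN_AND_EXCLUDED states), so every 'complex' document created with firstHalf: false gets deleted. A tiny sketch under the defaults of getBaselineDocuments():
// with documentsPerType = 4, 'complex' docs are created with
//   firstHalf = [true, true, false, false]   (index < documentsPerType / 2)
// so exactly half of them survive the upgrade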
export const baselineDocuments: SavedObjectsBulkCreateObject[] = [
...['server-foo', 'server-bar', 'server-baz'].map((name) => ({
type: 'server',
attributes: {
name,
export const getUpToDateBaselineTypes = (filterDeprecated: boolean) =>
baselineTypes.filter((type) => !filterDeprecated || type.name !== 'deprecated');
export const getCompatibleBaselineTypes = (filterDeprecated: boolean) =>
getUpToDateBaselineTypes(filterDeprecated).map<SavedObjectsType>((type) => {
// introduce a compatible change
if (type.name === 'complex') {
return {
...type,
mappings: {
properties: {
...type.mappings.properties,
createdAt: { type: 'date' },
},
},
modelVersions: {
...type.modelVersions,
2: {
changes: [
{
type: 'mappings_addition',
addedMappings: {
createdAt: { type: 'date' },
},
},
],
},
},
};
} else {
return type;
}
});
export const getReindexingBaselineTypes = (filterDeprecated: boolean) =>
getUpToDateBaselineTypes(filterDeprecated).map<SavedObjectsType>((type) => {
// introduce an incompatible change
if (type.name === 'complex') {
return {
...type,
mappings: {
properties: {
...type.mappings.properties,
value: { type: 'text' }, // we're forcing an incompatible update (number => text)
createdAt: { type: 'date' },
},
},
modelVersions: {
...type.modelVersions,
2: {
changes: [
{
type: 'data_removal', // not true (we're testing reindex migrations, and modelVersions do not support breaking changes)
removedAttributePaths: ['complex.properties.value'],
},
{
type: 'mappings_addition',
addedMappings: {
createdAt: { type: 'date' },
},
},
{
type: 'unsafe_transform',
transformFn: (doc) => {
if (doc.attributes.value % 100 === 0) {
throw new Error(
`Cannot convert 'complex' objects with values that are multiple of 100 ${doc.id}`
);
}
return { document: doc };
},
},
],
},
},
};
} else if (type.name === 'task') {
return {
...type,
mappings: {
properties: {
...type.mappings.properties,
lastRun: { type: 'date' },
},
},
modelVersions: {
...type.modelVersions,
2: {
changes: [
{
type: 'mappings_addition',
addedMappings: {
lastRun: { type: 'date' },
},
},
],
},
},
};
} else {
return type;
}
});
export interface GetBaselineDocumentsParams {
documentsPerType?: number;
}
export const getBaselineDocuments = (
params: GetBaselineDocumentsParams = {}
): SavedObjectsBulkCreateObject[] => {
const documentsPerType = params.documentsPerType ?? 4;
return [
...new Array(documentsPerType).fill(true).map((_, index) => ({
type: 'server',
attributes: {
name: `server-${index}`,
},
})),
...new Array(documentsPerType).fill(true).map((_, index) => ({
type: 'basic',
attributes: {
name: `basic-${index}`,
},
})),
...new Array(documentsPerType).fill(true).map((_, index) => ({
type: 'deprecated',
attributes: {
name: `deprecated-${index}`,
},
})),
...new Array(documentsPerType).fill(true).map((_, index) => ({
type: 'task',
attributes: {
name: `task-${index}`,
},
})),
...new Array(documentsPerType).fill(true).map((_, index) => ({
type: 'complex',
attributes: {
name: `complex-${index}`,
firstHalf: index < documentsPerType / 2,
value: index,
},
})),
];
};
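For illustration, a hedged sketch of how the 1k baseline archive above can be produced from these documents (helper and constant names from kibana_migrator_archive_utils):
// 200 docs per type * 5 types = 1_000 documents
await createBaselineArchive({
  documents: getBaselineDocuments({ documentsPerType: BASELINE_DOCUMENTS_PER_TYPE_1K }),
  dataArchive: BASELINE_TEST_ARCHIVE_1K,
});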
export interface CreateBaselineParams {
documentsPerType?: number;
}
export const createBaseline = async (params: CreateBaselineParams = {}) => {
const { client, runMigrations, savedObjectsRepository } = await getKibanaMigratorTestKit({
kibanaIndex: defaultKibanaIndex,
types: baselineTypes,
});
// remove the testing indices (current and next minor)
await client.indices.delete({
index: [
defaultKibanaIndex,
`${defaultKibanaIndex}_${currentVersion}_001`,
`${defaultKibanaIndex}_${nextMinor}_001`,
defaultKibanaTaskIndex,
`${defaultKibanaTaskIndex}_${currentVersion}_001`,
`${defaultKibanaTaskIndex}_${nextMinor}_001`,
],
ignore_unavailable: true,
});
await runMigrations();
await savedObjectsRepository.bulkCreate(getBaselineDocuments(params), {
refresh: 'wait_for',
});
return client;
};
interface GetMutatedMigratorParams {
logFilePath?: string;
kibanaVersion?: string;
filterDeprecated?: boolean;
types?: Array<SavedObjectsType<any>>;
settings?: Record<string, any>;
clientWrapperFactory?: ElasticsearchClientWrapperFactory;
}
export const getUpToDateMigratorTestKit = async ({
logFilePath = defaultLogFilePath,
filterDeprecated = false,
kibanaVersion = nextMinor,
settings = {},
}: GetMutatedMigratorParams = {}) => {
return await getKibanaMigratorTestKit({
types: getUpToDateBaselineTypes(filterDeprecated),
logFilePath,
kibanaVersion,
settings,
});
};
export const getCompatibleMigratorTestKit = async ({
logFilePath = defaultLogFilePath,
filterDeprecated = false,
kibanaVersion = nextMinor,
settings = {},
}: GetMutatedMigratorParams & {
filterDeprecated?: boolean;
} = {}) => {
return await getKibanaMigratorTestKit({
logFilePath,
types: getCompatibleBaselineTypes(filterDeprecated),
kibanaVersion,
settings,
});
};
export const getReindexingMigratorTestKit = async ({
logFilePath = defaultLogFilePath,
filterDeprecated = false,
types = getReindexingBaselineTypes(filterDeprecated),
kibanaVersion = nextMinor,
clientWrapperFactory,
settings = {},
}: GetMutatedMigratorParams = {}) => {
return await getKibanaMigratorTestKit({
logFilePath,
types,
kibanaVersion,
clientWrapperFactory,
settings: {
...settings,
migrations: {
discardUnknownObjects: nextMinor,
discardCorruptObjects: nextMinor,
...settings.migrations,
},
},
});
};
export const kibanaSplitIndex = `${defaultKibanaIndex}_split`;
export const getRelocatingMigratorTestKit = async ({
logFilePath = defaultLogFilePath,
filterDeprecated = false,
// relocate 'task' and 'basic' objects to a new SO index
relocateTypes = {
task: kibanaSplitIndex,
basic: kibanaSplitIndex,
},
types = getReindexingBaselineTypes(filterDeprecated).map((type) => ({
...type,
...(relocateTypes[type.name] && { indexPattern: relocateTypes[type.name] }),
})),
...['basic-foo', 'basic-bar', 'basic-baz'].map((name) => ({
type: 'basic',
attributes: {
name,
kibanaVersion = nextMinor,
clientWrapperFactory,
settings = {},
}: GetMutatedMigratorParams & { relocateTypes?: Record<string, string> } = {}) => {
return await getKibanaMigratorTestKit({
logFilePath,
types,
kibanaVersion,
clientWrapperFactory,
defaultIndexTypesMap: baselineIndexTypesMap,
settings: {
...settings,
migrations: {
discardUnknownObjects: nextMinor,
discardCorruptObjects: nextMinor,
...settings.migrations,
},
},
})),
...['deprecated-foo', 'deprecated-bar', 'deprecated-baz'].map((name) => ({
type: 'deprecated',
attributes: {
name,
},
})),
...['complex-foo', 'complex-bar', 'complex-baz', 'complex-lipsum'].map((name, index) => ({
type: 'complex',
attributes: {
name,
value: index,
},
})),
];
});
};
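A minimal sketch of how a test might consume this kit (hypothetical usage, relying on the defaults above):
const { runMigrations, client } = await getRelocatingMigratorTestKit();
await runMigrations();
// 'task' and 'basic' documents should now live in the split index
const counts = await getAggregatedTypesCount(client, kibanaSplitIndex);
// e.g. { basic: 4, task: 4 } for a baseline created with the default documentsPerType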

View file

@ -49,7 +49,6 @@ import type { ISavedObjectsRepository } from '@kbn/core-saved-objects-api-server
import { getDocLinks, getDocLinksMeta } from '@kbn/doc-links';
import type { DocLinksServiceStart } from '@kbn/core-doc-links-server';
import type { NodeRoles } from '@kbn/core-node-server';
import { baselineDocuments, baselineTypes } from './kibana_migrator_test_kit.fixtures';
import { delay } from './test_utils';
import type { ElasticsearchClientWrapperFactory } from './elasticsearch_client_wrapper';
@ -60,7 +59,8 @@ const env = Env.createDefault(REPO_ROOT, getEnvOptions());
export const currentVersion = env.packageInfo.version;
export const nextMinor = new SemVer(currentVersion).inc('minor').format();
export const currentBranch = env.packageInfo.branch;
export const defaultKibanaIndex = '.kibana_migrator_tests';
export const defaultKibanaIndex = '.kibana_migrator';
export const defaultKibanaTaskIndex = `${defaultKibanaIndex}_tasks`;
export const defaultNodeRoles: NodeRoles = { migrator: true, ui: true, backgroundTasks: true };
export interface GetEsClientParams {
@ -91,10 +91,12 @@ export interface KibanaMigratorTestKit {
}
export const startElasticsearch = async ({
esVersion,
basePath,
dataArchive,
timeout,
}: {
esVersion?: string;
basePath?: string;
dataArchive?: string;
timeout?: number;
@ -106,6 +108,7 @@ export const startElasticsearch = async ({
license: 'basic',
basePath,
dataArchive,
esVersion,
},
},
});
@ -345,8 +348,8 @@ export const deleteSavedObjectIndices = async (
export const getAggregatedTypesCount = async (
client: ElasticsearchClient,
index: string
): Promise<Record<string, number> | undefined> => {
index: string | string[] = [defaultKibanaIndex, defaultKibanaTaskIndex]
): Promise<Record<string, number>> => {
try {
await client.indices.refresh({ index });
const response = await client.search<unknown, { typesAggregation: { buckets: any[] } }>({
@ -383,26 +386,12 @@ export const getAggregatedTypesCount = async (
);
} catch (error) {
if (error.meta?.statusCode === 404) {
return undefined;
return {};
}
throw error;
}
};
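With the new default parameter, callers can omit the index and get a combined breakdown across both test indices (a sketch, assuming a freshly created baseline):
// aggregates across [defaultKibanaIndex, defaultKibanaTaskIndex] by default
const counts = await getAggregatedTypesCount(client);
// => { basic: 4, complex: 4, server: 4, deprecated: 4, task: 4 }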
export const getAggregatedTypesCountAllIndices = async (esClient: ElasticsearchClient) => {
const typeBreakdown = await Promise.all(
ALL_SAVED_OBJECT_INDICES.map((index) => getAggregatedTypesCount(esClient, index))
);
return ALL_SAVED_OBJECT_INDICES.reduce<Record<string, Record<string, number> | undefined>>(
(acc, index, pos) => {
acc[index] = typeBreakdown[pos];
return acc;
},
{}
);
};
const registerTypes = (
typeRegistry: SavedObjectTypeRegistry,
types?: Array<SavedObjectsType<any>>
@ -410,157 +399,6 @@ const registerTypes = (
(types || []).forEach((type) => typeRegistry.registerType(type));
};
export const createBaseline = async () => {
const { client, runMigrations, savedObjectsRepository } = await getKibanaMigratorTestKit({
kibanaIndex: defaultKibanaIndex,
types: baselineTypes,
});
// remove the testing index (current and next minor)
await client.indices.delete({
index: [
defaultKibanaIndex,
`${defaultKibanaIndex}_${currentVersion}_001`,
`${defaultKibanaIndex}_${nextMinor}_001`,
],
ignore_unavailable: true,
});
await runMigrations();
await savedObjectsRepository.bulkCreate(baselineDocuments, {
refresh: 'wait_for',
});
return client;
};
interface GetMutatedMigratorParams {
logFilePath?: string;
kibanaVersion?: string;
settings?: Record<string, any>;
}
export const getIdenticalMappingsMigrator = async ({
logFilePath = defaultLogFilePath,
kibanaVersion = nextMinor,
settings = {},
}: GetMutatedMigratorParams = {}) => {
return await getKibanaMigratorTestKit({
logFilePath,
types: baselineTypes,
kibanaVersion,
settings,
});
};
export const getNonDeprecatedMappingsMigrator = async ({
logFilePath = defaultLogFilePath,
kibanaVersion = nextMinor,
settings = {},
}: GetMutatedMigratorParams = {}) => {
return await getKibanaMigratorTestKit({
logFilePath,
types: baselineTypes.filter((type) => type.name !== 'deprecated'),
kibanaVersion,
settings,
});
};
export const getCompatibleMappingsMigrator = async ({
logFilePath = defaultLogFilePath,
filterDeprecated = false,
kibanaVersion = nextMinor,
settings = {},
}: GetMutatedMigratorParams & {
filterDeprecated?: boolean;
} = {}) => {
const types = baselineTypes
.filter((type) => !filterDeprecated || type.name !== 'deprecated')
.map<SavedObjectsType>((type) => {
if (type.name === 'complex') {
return {
...type,
mappings: {
properties: {
...type.mappings.properties,
createdAt: { type: 'date' },
},
},
modelVersions: {
...type.modelVersions,
2: {
changes: [
{
type: 'mappings_addition',
addedMappings: {
createdAt: { type: 'date' },
},
},
],
},
},
};
} else {
return type;
}
});
return await getKibanaMigratorTestKit({
logFilePath,
types,
kibanaVersion,
settings,
});
};
export const getIncompatibleMappingsMigrator = async ({
logFilePath = defaultLogFilePath,
kibanaVersion = nextMinor,
settings = {},
}: GetMutatedMigratorParams = {}) => {
const types = baselineTypes.map<SavedObjectsType>((type) => {
if (type.name === 'complex') {
return {
...type,
mappings: {
properties: {
...type.mappings.properties,
value: { type: 'text' }, // we're forcing an incompatible update (number => text)
createdAt: { type: 'date' },
},
},
modelVersions: {
...type.modelVersions,
2: {
changes: [
{
type: 'data_removal', // not true (we're testing reindex migrations, and modelVersions do not support breaking changes)
removedAttributePaths: ['complex.properties.value'],
},
{
type: 'mappings_addition',
addedMappings: {
createdAt: { type: 'date' },
},
},
],
},
},
};
} else {
return type;
}
});
return await getKibanaMigratorTestKit({
logFilePath,
types,
kibanaVersion,
settings,
});
};
export const getCurrentVersionTypeRegistry = async ({
oss,
}: {