Mirror of https://github.com/elastic/kibana.git, synced 2025-04-23 17:28:26 -04:00
* manually revert #105213
* enabled xpack to register types for some IT tests
* fix doc size

Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Pierre Gayvallet <pierre.gayvallet@elastic.co>
This commit is contained in:
parent
56fa1f1e22
commit
8de5502ef9
13 changed files with 94 additions and 299 deletions
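In short, this revert moves unknown-document detection from the success channel to the failure channel of the `checkForUnknownDocs` action: instead of resolving with `Either.right({ unknownDocs })` and logging a warning, the action now resolves with `Either.left({ type: 'unknown_docs_found', ... })`, which the migration state machine turns into a `FATAL` state. A minimal sketch of the before/after contract, using fp-ts as the diff does (types abridged for illustration, not the full Kibana definitions):

import * as TaskEither from 'fp-ts/lib/TaskEither';

interface RetryableEsClientError {
  type: 'retryable_es_client_error';
  message: string;
}
interface UnknownDocsFound {
  type: 'unknown_docs_found';
  unknownDocs: Array<{ id: string; type: string }>;
}

// Before the revert: unknown docs were part of the right (success) value.
type CheckBefore = TaskEither.TaskEither<
  RetryableEsClientError,
  { unknownDocs: Array<{ id: string; type: string }> }
>;

// After the revert: unknown docs are a left (failure) value alongside client errors.
type CheckAfter = TaskEither.TaskEither<RetryableEsClientError | UnknownDocsFound, {}>;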
@@ -98,12 +98,10 @@ describe('checkForUnknownDocs', () => {
       const result = await task();
 
       expect(Either.isRight(result)).toBe(true);
-      expect((result as Either.Right<any>).right).toEqual({
-        unknownDocs: [],
-      });
+      expect((result as Either.Right<any>).right).toEqual({});
     });
 
-    it('resolves with `Either.right` when unknown docs are found', async () => {
+    it('resolves with `Either.left` when unknown docs are found', async () => {
       const client = elasticsearchClientMock.createInternalClient(
         elasticsearchClientMock.createSuccessTransportRequestPromise({
           hits: {
@@ -124,8 +122,9 @@ describe('checkForUnknownDocs', () => {
 
       const result = await task();
 
-      expect(Either.isRight(result)).toBe(true);
-      expect((result as Either.Right<any>).right).toEqual({
+      expect(Either.isLeft(result)).toBe(true);
+      expect((result as Either.Left<any>).left).toEqual({
+        type: 'unknown_docs_found',
         unknownDocs: [
           { id: '12', type: 'foo' },
           { id: '14', type: 'bar' },
@@ -151,8 +150,9 @@ describe('checkForUnknownDocs', () => {
 
       const result = await task();
 
-      expect(Either.isRight(result)).toBe(true);
-      expect((result as Either.Right<any>).right).toEqual({
+      expect(Either.isLeft(result)).toBe(true);
+      expect((result as Either.Left<any>).left).toEqual({
+        type: 'unknown_docs_found',
         unknownDocs: [{ id: '12', type: 'unknown' }],
       });
     });

@@ -32,6 +32,7 @@ export interface CheckForUnknownDocsFoundDoc {
 
+/** @internal */
 export interface UnknownDocsFound {
   type: 'unknown_docs_found';
   unknownDocs: CheckForUnknownDocsFoundDoc[];
 }
 
@@ -41,7 +42,10 @@ export const checkForUnknownDocs =
     indexName,
     unusedTypesQuery,
     knownTypes,
-  }: CheckForUnknownDocsParams): TaskEither.TaskEither<RetryableEsClientError, UnknownDocsFound> =>
+  }: CheckForUnknownDocsParams): TaskEither.TaskEither<
+    RetryableEsClientError | UnknownDocsFound,
+    {}
+  > =>
   () => {
     const query = createUnknownDocQuery(unusedTypesQuery, knownTypes);
 
@@ -54,9 +58,14 @@ export const checkForUnknownDocs =
       })
       .then((response) => {
         const { hits } = response.body.hits;
-        return Either.right({
-          unknownDocs: hits.map((hit) => ({ id: hit._id, type: hit._source?.type ?? 'unknown' })),
-        });
+        if (hits.length) {
+          return Either.left({
+            type: 'unknown_docs_found' as const,
+            unknownDocs: hits.map((hit) => ({ id: hit._id, type: hit._source?.type ?? 'unknown' })),
+          });
+        } else {
+          return Either.right({});
+        }
       })
       .catch(catchRetryableEsClientErrors);
   };

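For context on how a caller consumes the widened left channel, here is a hedged sketch; the `isLeftTypeof` stand-in is a minimal reimplementation for illustration only (Kibana's real helper lives in `../actions` and is used in `model.ts` further down):

import * as Either from 'fp-ts/lib/Either';
import * as TaskEither from 'fp-ts/lib/TaskEither';

interface RetryableEsClientError {
  type: 'retryable_es_client_error';
  message: string;
}
interface UnknownDocsFound {
  type: 'unknown_docs_found';
  unknownDocs: Array<{ id: string; type: string }>;
}
type CheckLeft = RetryableEsClientError | UnknownDocsFound;

// Minimal stand-in for Kibana's isLeftTypeof: narrow a left value by its 'type' tag.
const isLeftTypeof = <T extends CheckLeft['type']>(
  left: CheckLeft,
  type: T
): left is Extract<CheckLeft, { type: T }> => left.type === type;

// Stubbed task with the post-revert shape; the real action queries Elasticsearch.
const task: TaskEither.TaskEither<CheckLeft, {}> = async () =>
  Either.left({ type: 'unknown_docs_found', unknownDocs: [{ id: 'foo:12', type: 'foo' }] });

async function handleCheck() {
  const result = await task();
  if (Either.isLeft(result)) {
    if (isLeftTypeof(result.left, 'unknown_docs_found')) {
      // Unknown docs: the migration state machine transitions to FATAL.
      console.error(result.left.unknownDocs);
    } else {
      // Retryable Elasticsearch client error: the action is retried.
      console.error(result.left.message);
    }
  } else {
    // right({}) means no unknown docs were found: proceed with the migration.
  }
}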
@@ -80,6 +80,7 @@ export type {
 } from './update_and_pickup_mappings';
 export { updateAndPickupMappings } from './update_and_pickup_mappings';
 
+import type { UnknownDocsFound } from './check_for_unknown_docs';
 export type {
   CheckForUnknownDocsParams,
   UnknownDocsFound,
@@ -141,6 +142,7 @@ export interface ActionErrorTypeMap {
   remove_index_not_a_concrete_index: RemoveIndexNotAConcreteIndex;
   documents_transform_failed: DocumentsTransformFailed;
   request_entity_too_large_exception: RequestEntityTooLargeException;
+  unknown_docs_found: UnknownDocsFound;
 }
 
 /**

@@ -139,7 +139,6 @@ describe('migrateRawDocsSafely', () => {
     ]);
     const task = migrateRawDocsSafely({
       serializer: new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
-      knownTypes: new Set(['a', 'c']),
       migrateDoc: transform,
       rawDocs: [
         { _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } },
@@ -184,7 +183,6 @@ describe('migrateRawDocsSafely', () => {
     ]);
     const task = migrateRawDocsSafely({
       serializer: new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
-      knownTypes: new Set(['a', 'c']),
       migrateDoc: transform,
       rawDocs: [
         { _id: 'foo:b', _source: { type: 'a', a: { name: 'AAA' } } },
@@ -206,7 +204,6 @@ describe('migrateRawDocsSafely', () => {
     ]);
     const task = migrateRawDocsSafely({
       serializer: new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
-      knownTypes: new Set(['a', 'c']),
       migrateDoc: transform,
       rawDocs: [{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } }],
     });
@@ -240,7 +237,6 @@ describe('migrateRawDocsSafely', () => {
     });
     const task = migrateRawDocsSafely({
       serializer: new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
-      knownTypes: new Set(['a', 'c']),
       migrateDoc: transform,
       rawDocs: [{ _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } }], // this is the raw doc
     });
@@ -256,43 +252,4 @@ describe('migrateRawDocsSafely', () => {
       }
     `);
   });
-
-  test('skips documents of unknown types', async () => {
-    const transform = jest.fn<any, any>((doc: any) => [
-      set(_.cloneDeep(doc), 'attributes.name', 'HOI!'),
-    ]);
-    const task = migrateRawDocsSafely({
-      serializer: new SavedObjectsSerializer(new SavedObjectTypeRegistry()),
-      knownTypes: new Set(['a']),
-      migrateDoc: transform,
-      rawDocs: [
-        { _id: 'a:b', _source: { type: 'a', a: { name: 'AAA' } } },
-        { _id: 'c:d', _source: { type: 'c', c: { name: 'DDD' } } },
-      ],
-    });
-
-    const result = (await task()) as Either.Right<DocumentsTransformSuccess>;
-    expect(result._tag).toEqual('Right');
-    expect(result.right.processedDocs).toEqual([
-      {
-        _id: 'a:b',
-        _source: { type: 'a', a: { name: 'HOI!' }, migrationVersion: {}, references: [] },
-      },
-      {
-        _id: 'c:d',
-        // name field is not migrated on unknown type
-        _source: { type: 'c', c: { name: 'DDD' } },
-      },
-    ]);
-
-    const obj1 = {
-      id: 'b',
-      type: 'a',
-      attributes: { name: 'AAA' },
-      migrationVersion: {},
-      references: [],
-    };
-    expect(transform).toHaveBeenCalledTimes(1);
-    expect(transform).toHaveBeenNthCalledWith(1, obj1);
-  });
 });

@@ -25,13 +25,16 @@ export interface DocumentsTransformFailed {
   readonly corruptDocumentIds: string[];
   readonly transformErrors: TransformErrorObjects[];
 }
 
 export interface DocumentsTransformSuccess {
   readonly processedDocs: SavedObjectsRawDoc[];
 }
 
 export interface TransformErrorObjects {
   readonly rawId: string;
   readonly err: TransformSavedObjectDocumentError | Error;
 }
 
+type MigrateFn = (
+  doc: SavedObjectUnsanitizedDoc<unknown>
+) => Promise<Array<SavedObjectUnsanitizedDoc<unknown>>>;
@@ -83,7 +86,6 @@ export async function migrateRawDocs(
 
 interface MigrateRawDocsSafelyDeps {
   serializer: SavedObjectsSerializer;
-  knownTypes: ReadonlySet<string>;
   migrateDoc: MigrateAndConvertFn;
   rawDocs: SavedObjectsRawDoc[];
 }
@@ -97,7 +99,6 @@ interface MigrateRawDocsSafelyDeps {
  */
 export function migrateRawDocsSafely({
   serializer,
-  knownTypes,
   migrateDoc,
   rawDocs,
 }: MigrateRawDocsSafelyDeps): TaskEither.TaskEither<
@@ -111,10 +112,7 @@ export function migrateRawDocsSafely({
     const corruptSavedObjectIds: string[] = [];
     const options = { namespaceTreatment: 'lax' as const };
     for (const raw of rawDocs) {
-      // Do not transform documents of unknown types
-      if (raw?._source?.type && !knownTypes.has(raw._source.type)) {
-        processedDocs.push(raw);
-      } else if (serializer.isRawSavedObject(raw, options)) {
+      if (serializer.isRawSavedObject(raw, options)) {
         try {
           const savedObject = convertToRawAddMigrationVersion(raw, options, serializer);
           processedDocs.push(

@@ -11,16 +11,7 @@ import fs from 'fs/promises';
 import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 import * as kbnTestServer from '../../../../test_helpers/kbn_server';
 import { Root } from '../../../root';
-import JSON5 from 'json5';
-import { ElasticsearchClient } from '../../../elasticsearch';
-import { Env } from '@kbn/config';
-import { REPO_ROOT } from '@kbn/utils';
-import { getEnvOptions } from '../../../config/mocks';
-import { retryAsync } from '../test_helpers/retry_async';
-import { LogRecord } from '@kbn/logging';
 
-const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
-const targetIndex = `.kibana_${kibanaVersion}_001`;
 const logFilePath = Path.join(__dirname, '7_13_unknown_types.log');
 
 async function removeLogFile() {
@@ -63,152 +54,54 @@ describe('migration v2', () => {
     await new Promise((resolve) => setTimeout(resolve, 10000));
   });
 
-  it('logs a warning and completes the migration with unknown docs retained', async () => {
-    root = createRoot();
-    esServer = await startES();
-    await root.preboot();
-    await root.setup();
-    await root.start();
-
-    let unknownDocsWarningLog: LogRecord;
-
-    await retryAsync(
-      async () => {
-        const logFileContent = await fs.readFile(logFilePath, 'utf-8');
-        const records = logFileContent
-          .split('\n')
-          .filter(Boolean)
-          .map((str) => JSON5.parse(str));
-
-        unknownDocsWarningLog = records.find((rec) =>
-          rec.message.startsWith(`[.kibana] CHECK_UNKNOWN_DOCUMENTS`)
-        );
-
-        expect(
-          unknownDocsWarningLog.message.startsWith(
-            '[.kibana] CHECK_UNKNOWN_DOCUMENTS Upgrades will fail for 8.0+ because documents were found for unknown saved ' +
-              'object types. To ensure that upgrades will succeed in the future, either re-enable plugins or delete ' +
-              `these documents from the "${targetIndex}" index after the current upgrade completes.`
-          )
-        ).toBeTruthy();
-      },
-      { retryAttempts: 10, retryDelayMs: 200 }
-    );
-
-    const unknownDocs = [
-      { type: 'space', id: 'space:default' },
-      { type: 'space', id: 'space:first' },
-      { type: 'space', id: 'space:second' },
-      { type: 'space', id: 'space:third' },
-      { type: 'space', id: 'space:forth' },
-      { type: 'space', id: 'space:fifth' },
-      { type: 'space', id: 'space:sixth' },
-      { type: 'foo', id: 'P2SQfHkBs3dBRGh--No5' },
-      { type: 'foo', id: 'QGSZfHkBs3dBRGh-ANoD' },
-      { type: 'foo', id: 'QWSZfHkBs3dBRGh-hNob' },
-    ];
-
-    unknownDocs.forEach(({ id, type }) => {
-      expect(unknownDocsWarningLog.message).toEqual(
-        expect.stringContaining(`- "${id}" (type: "${type}")`)
-      );
-    });
-
-    const client: ElasticsearchClient = esServer.es.getKibanaEsClient();
-    const { body: response } = await client.indices.getSettings({
-      index: targetIndex,
-    });
-    const settings = response[targetIndex].settings as estypes.IndicesIndexStatePrefixedSettings;
-    expect(settings.index).not.toBeUndefined();
-    expect(settings.index!.blocks?.write).not.toEqual('true');
-
-    // Ensure that documents for unknown types were preserved in target index in an unmigrated state
-    const spaceDocs = await fetchDocs(client, targetIndex, 'space');
-    expect(spaceDocs.map((s) => s.id)).toEqual(
-      expect.arrayContaining([
-        'space:default',
-        'space:first',
-        'space:second',
-        'space:third',
-        'space:forth',
-        'space:fifth',
-        'space:sixth',
-      ])
-    );
-    spaceDocs.forEach((d) => {
-      expect(d.migrationVersion.space).toEqual('6.6.0');
-      expect(d.coreMigrationVersion).toEqual('7.13.0');
-    });
-    const fooDocs = await fetchDocs(client, targetIndex, 'foo');
-    expect(fooDocs.map((f) => f.id)).toEqual(
-      expect.arrayContaining([
-        'P2SQfHkBs3dBRGh--No5',
-        'QGSZfHkBs3dBRGh-ANoD',
-        'QWSZfHkBs3dBRGh-hNob',
-      ])
-    );
-    fooDocs.forEach((d) => {
-      expect(d.migrationVersion.foo).toEqual('7.13.0');
-      expect(d.coreMigrationVersion).toEqual('7.13.0');
-    });
-  });
-
-  it('migrates outdated documents when types are re-enabled', async () => {
+  it('fails the migration if unknown types are found in the source index', async () => {
     // Start kibana with foo and space types disabled
     root = createRoot();
     esServer = await startES();
     await root.preboot();
     await root.setup();
-    await root.start();
 
-    // Shutdown and start Kibana again with space type registered to ensure space docs get migrated
-    await root.shutdown();
-    root = createRoot();
-    await root.preboot();
-    const coreSetup = await root.setup();
-    coreSetup.savedObjects.registerType({
-      name: 'space',
-      hidden: false,
-      mappings: { properties: {} },
-      namespaceType: 'agnostic',
-      migrations: {
-        '6.6.0': (d) => d,
-        [kibanaVersion]: (d) => d,
-      },
-    });
-    await root.start();
+    try {
+      await root.start();
+      expect('should have thrown').toEqual('but it did not');
+    } catch (err) {
+      const errorMessage = err.message;
 
-    const client: ElasticsearchClient = esServer.es.getKibanaEsClient();
-    const spacesDocsMigrated = await fetchDocs(client, targetIndex, 'space');
-    expect(spacesDocsMigrated.map((s) => s.id)).toEqual(
-      expect.arrayContaining([
-        'space:default',
-        'space:first',
-        'space:second',
-        'space:third',
-        'space:forth',
-        'space:fifth',
-        'space:sixth',
-      ])
-    );
-    spacesDocsMigrated.forEach((d) => {
-      expect(d.migrationVersion.space).toEqual(kibanaVersion); // should be migrated
-      expect(d.coreMigrationVersion).toEqual(kibanaVersion);
-    });
+      expect(
+        errorMessage.startsWith(
+          'Unable to complete saved object migrations for the [.kibana] index: Migration failed because documents ' +
+            'were found for unknown saved object types. To proceed with the migration, please delete these documents from the ' +
+            '".kibana_7.13.0_001" index.'
+        )
+      ).toBeTruthy();
 
-    // Make sure unmigrated foo docs are also still there in an unmigrated state
-    const fooDocsUnmigrated = await fetchDocs(client, targetIndex, 'foo');
-    expect(fooDocsUnmigrated.map((f) => f.id)).toEqual(
-      expect.arrayContaining([
-        'P2SQfHkBs3dBRGh--No5',
-        'QGSZfHkBs3dBRGh-ANoD',
-        'QWSZfHkBs3dBRGh-hNob',
-      ])
-    );
-    fooDocsUnmigrated.forEach((d) => {
-      expect(d.migrationVersion.foo).toEqual('7.13.0'); // should still not be migrated
-      expect(d.coreMigrationVersion).toEqual('7.13.0');
-    });
+      const unknownDocs = [
+        { type: 'space', id: 'space:default' },
+        { type: 'space', id: 'space:first' },
+        { type: 'space', id: 'space:second' },
+        { type: 'space', id: 'space:third' },
+        { type: 'space', id: 'space:forth' },
+        { type: 'space', id: 'space:fifth' },
+        { type: 'space', id: 'space:sixth' },
+        { type: 'foo', id: 'P2SQfHkBs3dBRGh--No5' },
+        { type: 'foo', id: 'QGSZfHkBs3dBRGh-ANoD' },
+        { type: 'foo', id: 'QWSZfHkBs3dBRGh-hNob' },
+      ];
+
+      unknownDocs.forEach(({ id, type }) => {
+        expect(errorMessage).toEqual(expect.stringContaining(`- "${id}" (type: "${type}")`));
+      });
+
+      const client = esServer.es.getClient();
+      const { body: response } = await client.indices.getSettings(
+        { index: '.kibana_7.13.0_001' },
+        { meta: true }
+      );
+      const settings = response['.kibana_7.13.0_001']
+        .settings as estypes.IndicesIndexStatePrefixedSettings;
+      expect(settings.index).not.toBeUndefined();
+      expect(settings.index!.blocks?.write).not.toEqual('true');
+    }
   });
 });
@@ -242,26 +135,3 @@ function createRoot() {
       }
     );
 }
-
-async function fetchDocs(esClient: ElasticsearchClient, index: string, type: string) {
-  const { body } = await esClient.search<any>({
-    index,
-    size: 10000,
-    body: {
-      query: {
-        bool: {
-          should: [
-            {
-              term: { type },
-            },
-          ],
-        },
-      },
-    },
-  });
-
-  return body.hits.hits.map((h) => ({
-    ...h._source,
-    id: h._id,
-  }));
-}

@@ -109,7 +109,7 @@ describe('migration v2', () => {
     await root.preboot();
     await root.setup();
     await expect(root.start()).rejects.toMatchInlineSnapshot(
-      `[Error: Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.]`
+      `[Error: Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715274 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.]`
     );
 
     await retryAsync(
@@ -122,7 +122,7 @@ describe('migration v2', () => {
       expect(
         records.find((rec) =>
           rec.message.startsWith(
-            `Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715275 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.`
+            `Unable to complete saved object migrations for the [.kibana] index: The document with _id "canvas-workpad-template:workpad-template-061d7868-2b4e-4dc8-8bf7-3772b52926e5" is 1715274 bytes which exceeds the configured maximum batch size of 1015275 bytes. To proceed, please increase the 'migrations.maxBatchSizeBytes' Kibana configuration option and ensure that the Elasticsearch 'http.max_content_length' configuration option is set to an equal or larger value.`
           )
         )
       ).toBeDefined();

@@ -159,7 +159,7 @@ function createRoot(options: { maxBatchSizeBytes?: number }) {
       },
     },
     {
-      oss: true,
+      oss: false,
     }
   );
 }

@@ -170,7 +170,7 @@ function createRoot() {
       },
     },
     {
-      oss: true,
+      oss: false,
     }
   );
 }

@@ -170,7 +170,6 @@ export class KibanaMigrator {
       transformRawDocs: (rawDocs: SavedObjectsRawDoc[]) =>
         migrateRawDocsSafely({
           serializer: this.serializer,
-          knownTypes: new Set(this.typeRegistry.getAllTypes().map((t) => t.name)),
           migrateDoc: this.documentMigrator.migrateAndConvert,
           rawDocs,
         }),

@@ -25,7 +25,7 @@ describe('extractUnknownDocFailureReason', () => {
         '.kibana_15'
       )
     ).toMatchInlineSnapshot(`
-      "Upgrades will fail for 8.0+ because documents were found for unknown saved object types. To ensure that upgrades will succeed in the future, either re-enable plugins or delete these documents from the \\".kibana_15\\" index after the current upgrade completes.
+      "Migration failed because documents were found for unknown saved object types. To proceed with the migration, please delete these documents from the \\".kibana_15\\" index.
       The documents with unknown types are:
       - \\"unknownType:12\\" (type: \\"unknownType\\")
       - \\"anotherUnknownType:42\\" (type: \\"anotherUnknownType\\")

@@ -38,16 +38,15 @@ export function extractTransformFailuresReason(
 
 export function extractUnknownDocFailureReason(
   unknownDocs: CheckForUnknownDocsFoundDoc[],
-  targetIndex: string
+  sourceIndex: string
 ): string {
   return (
-    `Upgrades will fail for 8.0+ because documents were found for unknown saved object types. ` +
-    `To ensure that upgrades will succeed in the future, either re-enable plugins or delete these documents from the ` +
-    `"${targetIndex}" index after the current upgrade completes.\n` +
+    `Migration failed because documents were found for unknown saved object types. ` +
+    `To proceed with the migration, please delete these documents from the "${sourceIndex}" index.\n` +
     `The documents with unknown types are:\n` +
     unknownDocs.map((doc) => `- "${doc.id}" (type: "${doc.type}")\n`).join('') +
     `You can delete them using the following command:\n` +
-    `curl -X POST "{elasticsearch}/${targetIndex}/_bulk?pretty" -H 'Content-Type: application/json' -d'\n` +
+    `curl -X POST "{elasticsearch}/${sourceIndex}/_bulk?pretty" -H 'Content-Type: application/json' -d'\n` +
     unknownDocs.map((doc) => `{ "delete" : { "_id" : "${doc.id}" } }\n`).join('') +
    `'`
  );

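To make the new wording concrete, this is roughly the message the updated helper produces for the two documents used in the test above (derived from the template literals; the index name is the test's example):

Migration failed because documents were found for unknown saved object types. To proceed with the migration, please delete these documents from the ".kibana_15" index.
The documents with unknown types are:
- "unknownType:12" (type: "unknownType")
- "anotherUnknownType:42" (type: "anotherUnknownType")
You can delete them using the following command:
curl -X POST "{elasticsearch}/.kibana_15/_bulk?pretty" -H 'Content-Type: application/json' -d'
{ "delete" : { "_id" : "unknownType:12" } }
{ "delete" : { "_id" : "anotherUnknownType:42" } }
'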
@@ -717,7 +717,7 @@ describe('migrations v2 model', () => {
       },
     } as const;
 
-    test('CHECK_UNKNOWN_DOCUMENTS -> SET_SOURCE_WRITE_BLOCK if action succeeds and no unknown docs are found', () => {
+    test('CHECK_UNKNOWN_DOCUMENTS -> SET_SOURCE_WRITE_BLOCK if action succeeds', () => {
       const checkUnknownDocumentsSourceState: CheckUnknownDocumentsState = {
         ...baseState,
         controlState: 'CHECK_UNKNOWN_DOCUMENTS',
@@ -725,7 +725,7 @@ describe('migrations v2 model', () => {
         sourceIndexMappings: mappingsWithUnknownType,
       };
 
-      const res: ResponseType<'CHECK_UNKNOWN_DOCUMENTS'> = Either.right({ unknownDocs: [] });
+      const res: ResponseType<'CHECK_UNKNOWN_DOCUMENTS'> = Either.right({});
       const newState = model(checkUnknownDocumentsSourceState, res);
       expect(newState.controlState).toEqual('SET_SOURCE_WRITE_BLOCK');

@@ -765,7 +765,7 @@ describe('migrations v2 model', () => {
       expect(newState.logs).toEqual([]);
     });
 
-    test('CHECK_UNKNOWN_DOCUMENTS -> SET_SOURCE_WRITE_BLOCK and adds log if action succeeds and unknown docs were found', () => {
+    test('CHECK_UNKNOWN_DOCUMENTS -> FATAL if action fails and unknown docs were found', () => {
       const checkUnknownDocumentsSourceState: CheckUnknownDocumentsState = {
         ...baseState,
         controlState: 'CHECK_UNKNOWN_DOCUMENTS',
@@ -773,51 +773,20 @@ describe('migrations v2 model', () => {
         sourceIndexMappings: mappingsWithUnknownType,
       };
 
-      const res: ResponseType<'CHECK_UNKNOWN_DOCUMENTS'> = Either.right({
+      const res: ResponseType<'CHECK_UNKNOWN_DOCUMENTS'> = Either.left({
+        type: 'unknown_docs_found',
         unknownDocs: [
           { id: 'dashboard:12', type: 'dashboard' },
           { id: 'foo:17', type: 'foo' },
         ],
       });
       const newState = model(checkUnknownDocumentsSourceState, res);
-      expect(newState.controlState).toEqual('SET_SOURCE_WRITE_BLOCK');
+      expect(newState.controlState).toEqual('FATAL');
 
       expect(newState).toMatchObject({
-        controlState: 'SET_SOURCE_WRITE_BLOCK',
-        sourceIndex: Option.some('.kibana_3'),
-        targetIndex: '.kibana_7.11.0_001',
-      });
-
-      // This snapshot asserts that we disable the unknown saved object
-      // type. Because it's mappings are disabled, we also don't copy the
-      // `_meta.migrationMappingPropertyHashes` for the disabled type.
-      expect(newState.targetIndexMappings).toMatchInlineSnapshot(`
-        Object {
-          "_meta": Object {
-            "migrationMappingPropertyHashes": Object {
-              "new_saved_object_type": "4a11183eee21e6fbad864f7a30b39ad0",
-            },
-          },
-          "properties": Object {
-            "disabled_saved_object_type": Object {
-              "dynamic": false,
-              "properties": Object {},
-            },
-            "new_saved_object_type": Object {
-              "properties": Object {
-                "value": Object {
-                  "type": "text",
-                },
-              },
-            },
-          },
-        }
-      `);
-
-      expect(newState.logs[0]).toMatchObject({
-        level: 'warning',
-        message: expect.stringContaining(
-          'Upgrades will fail for 8.0+ because documents were found for unknown saved object types'
+        controlState: 'FATAL',
+        reason: expect.stringContaining(
+          'Migration failed because documents were found for unknown saved object types'
         ),
       });
     });

@@ -11,7 +11,6 @@ import * as Option from 'fp-ts/lib/Option';
 import * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
 
 import { AliasAction, isLeftTypeof } from '../actions';
-import { MigrationLog } from '../types';
 import { AllActionStates, State } from '../state';
 import type { ResponseType } from '../next';
 import { disableUnknownTypeMappingFields } from '../core';

@@ -352,24 +351,17 @@ export const model = (currentState: State, resW: ResponseType<AllActionStates>):
             { add: { index: target, alias: stateP.versionAlias } },
             { remove_index: { index: stateP.tempIndex } },
           ]),
-
-        logs: [
-          ...stateP.logs,
-          ...(res.right.unknownDocs.length > 0
-            ? ([
-                {
-                  level: 'warning',
-                  message: `CHECK_UNKNOWN_DOCUMENTS ${extractUnknownDocFailureReason(
-                    res.right.unknownDocs,
-                    target
-                  )}`,
-                },
-              ] as MigrationLog[])
-            : []),
-        ],
       };
     } else {
-      return throwBadResponse(stateP, res);
+      if (isLeftTypeof(res.left, 'unknown_docs_found')) {
+        return {
+          ...stateP,
+          controlState: 'FATAL',
+          reason: extractUnknownDocFailureReason(res.left.unknownDocs, stateP.sourceIndex.value),
+        };
+      } else {
+        return throwBadResponse(stateP, res.left);
+      }
     }
   } else if (stateP.controlState === 'SET_SOURCE_WRITE_BLOCK') {
     const res = resW as ExcludeRetryableEsError<ResponseType<typeof stateP.controlState>>;

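Summarizing the resulting CHECK_UNKNOWN_DOCUMENTS transitions as a sketch; note the retry case is an assumption inferred from the `ExcludeRetryableEsError` cast above, which suggests retryable client errors are handled generically before this branch:

// Sketch of the post-revert transitions out of CHECK_UNKNOWN_DOCUMENTS (not Kibana's actual types):
type CheckUnknownDocumentsTransition =
  | { response: 'right: {}'; next: 'SET_SOURCE_WRITE_BLOCK' } // no unknown docs found
  | { response: 'left: unknown_docs_found'; next: 'FATAL' } // reason built by extractUnknownDocFailureReason
  | { response: 'left: retryable_es_client_error'; next: 'CHECK_UNKNOWN_DOCUMENTS (retry)' }; // assumption: retried before this branch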