mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 17:28:26 -04:00
Re-enable & fix flaky test (#112260)
* Fixes #87968 Re-enable & fix flaky test * Review comments Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
This commit is contained in:
parent
2e0d2ba2da
commit
7040b47840
2 changed files with 255 additions and 124 deletions
|
@ -21,7 +21,7 @@ import { Root } from '../../../root';
|
|||
|
||||
const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;
|
||||
|
||||
const logFilePath = Path.join(__dirname, 'migration_from_v1.log');
|
||||
const logFilePath = Path.join(__dirname, 'migration_from_older_v1.log');
|
||||
|
||||
const asyncUnlink = Util.promisify(Fs.unlink);
|
||||
async function removeLogFile() {
|
||||
|
@ -53,7 +53,10 @@ async function fetchDocuments(esClient: ElasticsearchClient, index: string) {
|
|||
.sort(sortByTypeAndId);
|
||||
}
|
||||
|
||||
describe('migration v2', () => {
|
||||
describe('migrating from 7.3.0-xpack which used v1 migrations', () => {
|
||||
const migratedIndex = `.kibana_${kibanaVersion}_001`;
|
||||
const originalIndex = `.kibana_1`; // v1 migrations index
|
||||
|
||||
let esServer: kbnTestServer.TestElasticsearchUtils;
|
||||
let root: Root;
|
||||
let coreStart: InternalCoreStart;
|
||||
|
@ -159,136 +162,50 @@ describe('migration v2', () => {
|
|||
await new Promise((resolve) => setTimeout(resolve, 10000));
|
||||
};
|
||||
|
||||
// FLAKY: https://github.com/elastic/kibana/issues/87968
|
||||
describe.skip('migrating from 7.3.0-xpack version', () => {
|
||||
const migratedIndex = `.kibana_${kibanaVersion}_001`;
|
||||
|
||||
beforeAll(async () => {
|
||||
await removeLogFile();
|
||||
await startServers({
|
||||
oss: false,
|
||||
dataArchive: Path.join(__dirname, 'archives', '7.3.0_xpack_sample_saved_objects.zip'),
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await stopServers();
|
||||
});
|
||||
|
||||
it('creates the new index and the correct aliases', async () => {
|
||||
const { body } = await esClient.indices.get(
|
||||
{
|
||||
index: migratedIndex,
|
||||
},
|
||||
{ ignore: [404] }
|
||||
);
|
||||
|
||||
const response = body[migratedIndex];
|
||||
|
||||
expect(response).toBeDefined();
|
||||
expect(Object.keys(response.aliases!).sort()).toEqual([
|
||||
'.kibana',
|
||||
`.kibana_${kibanaVersion}`,
|
||||
]);
|
||||
});
|
||||
|
||||
it('copies all the document of the previous index to the new one', async () => {
|
||||
const migratedIndexResponse = await esClient.count({
|
||||
index: migratedIndex,
|
||||
});
|
||||
const oldIndexResponse = await esClient.count({
|
||||
index: '.kibana_1',
|
||||
});
|
||||
|
||||
// Use a >= comparison since once Kibana has started it might create new
|
||||
// documents like telemetry tasks
|
||||
expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count);
|
||||
});
|
||||
|
||||
it('migrates the documents to the highest version', async () => {
|
||||
const expectedVersions = getExpectedVersionPerType();
|
||||
const res = await esClient.search({
|
||||
index: migratedIndex,
|
||||
body: {
|
||||
sort: ['_doc'],
|
||||
},
|
||||
size: 10000,
|
||||
});
|
||||
const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
|
||||
allDocuments.forEach((doc) => {
|
||||
assertMigrationVersion(doc, expectedVersions);
|
||||
});
|
||||
beforeAll(async () => {
|
||||
await removeLogFile();
|
||||
await startServers({
|
||||
oss: false,
|
||||
dataArchive: Path.join(__dirname, 'archives', '7.3.0_xpack_sample_saved_objects.zip'),
|
||||
});
|
||||
});
|
||||
|
||||
describe('migrating from the same Kibana version that used v1 migrations', () => {
|
||||
const originalIndex = `.kibana_1`; // v1 migrations index
|
||||
const migratedIndex = `.kibana_${kibanaVersion}_001`;
|
||||
afterAll(async () => {
|
||||
await stopServers();
|
||||
});
|
||||
|
||||
beforeAll(async () => {
|
||||
await removeLogFile();
|
||||
await startServers({
|
||||
oss: false,
|
||||
dataArchive: Path.join(
|
||||
__dirname,
|
||||
'archives',
|
||||
'8.0.0_v1_migrations_sample_data_saved_objects.zip'
|
||||
),
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await stopServers();
|
||||
});
|
||||
|
||||
it('creates the new index and the correct aliases', async () => {
|
||||
const { body } = await esClient.indices.get(
|
||||
{
|
||||
index: migratedIndex,
|
||||
},
|
||||
{ ignore: [404] }
|
||||
);
|
||||
const response = body[migratedIndex];
|
||||
|
||||
expect(response).toBeDefined();
|
||||
expect(Object.keys(response.aliases!).sort()).toEqual([
|
||||
'.kibana',
|
||||
`.kibana_${kibanaVersion}`,
|
||||
]);
|
||||
});
|
||||
|
||||
it('copies the documents from the previous index to the new one', async () => {
|
||||
// original assertion on document count comparison (how attached are we to this assertion?)
|
||||
const migratedIndexResponse = await esClient.count({
|
||||
it('creates the new index and the correct aliases', async () => {
|
||||
const { body } = await esClient.indices.get(
|
||||
{
|
||||
index: migratedIndex,
|
||||
});
|
||||
const oldIndexResponse = await esClient.count({
|
||||
index: originalIndex,
|
||||
});
|
||||
},
|
||||
{ ignore: [404] }
|
||||
);
|
||||
|
||||
// Use a >= comparison since once Kibana has started it might create new
|
||||
// documents like telemetry tasks
|
||||
expect(migratedIndexResponse.body.count).toBeGreaterThanOrEqual(oldIndexResponse.body.count);
|
||||
const response = body[migratedIndex];
|
||||
|
||||
// new assertion against a document array comparison
|
||||
const originalDocs = await fetchDocuments(esClient, originalIndex);
|
||||
const migratedDocs = await fetchDocuments(esClient, migratedIndex);
|
||||
expect(assertMigratedDocuments(migratedDocs, originalDocs));
|
||||
expect(response).toBeDefined();
|
||||
expect(Object.keys(response.aliases!).sort()).toEqual(['.kibana', `.kibana_${kibanaVersion}`]);
|
||||
});
|
||||
|
||||
it('copies all the document of the previous index to the new one', async () => {
|
||||
const originalDocs = await fetchDocuments(esClient, originalIndex);
|
||||
const migratedDocs = await fetchDocuments(esClient, migratedIndex);
|
||||
expect(assertMigratedDocuments(migratedDocs, originalDocs));
|
||||
});
|
||||
|
||||
it('migrates the documents to the highest version', async () => {
|
||||
const expectedVersions = getExpectedVersionPerType();
|
||||
const res = await esClient.search({
|
||||
index: migratedIndex,
|
||||
body: {
|
||||
sort: ['_doc'],
|
||||
},
|
||||
size: 10000,
|
||||
});
|
||||
|
||||
it('migrates the documents to the highest version', async () => {
|
||||
const expectedVersions = getExpectedVersionPerType();
|
||||
const res = await esClient.search({
|
||||
index: migratedIndex,
|
||||
body: {
|
||||
sort: ['_doc'],
|
||||
},
|
||||
size: 10000,
|
||||
});
|
||||
const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
|
||||
allDocuments.forEach((doc) => {
|
||||
assertMigrationVersion(doc, expectedVersions);
|
||||
});
|
||||
const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
|
||||
allDocuments.forEach((doc) => {
|
||||
assertMigrationVersion(doc, expectedVersions);
|
||||
});
|
||||
});
|
||||
});
|
|
@ -0,0 +1,214 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0 and the Server Side Public License, v 1; you may not use this file except
|
||||
* in compliance with, at your election, the Elastic License 2.0 or the Server
|
||||
* Side Public License, v 1.
|
||||
*/
|
||||
|
||||
import Path from 'path';
|
||||
import Fs from 'fs';
|
||||
import Util from 'util';
|
||||
import Semver from 'semver';
|
||||
import { REPO_ROOT } from '@kbn/dev-utils';
|
||||
import { Env } from '@kbn/config';
|
||||
import { getEnvOptions } from '../../../config/mocks';
|
||||
import * as kbnTestServer from '../../../../test_helpers/kbn_server';
|
||||
import { ElasticsearchClient } from '../../../elasticsearch';
|
||||
import { SavedObjectsRawDoc } from '../../serialization';
|
||||
import { InternalCoreStart } from '../../../internal_types';
|
||||
import { Root } from '../../../root';
|
||||
|
||||
// Kibana version under test, derived from the repository's package info.
const kibanaVersion = Env.createDefault(REPO_ROOT, getEnvOptions()).packageInfo.version;

// Destination of the migration log written by the Kibana root started in this suite.
const logFilePath = Path.join(__dirname, 'migration_from_same_v1.log');

// Promisified fs.unlink so removeLogFile can await the deletion.
const asyncUnlink = Util.promisify(Fs.unlink);
|
||||
async function removeLogFile() {
|
||||
// ignore errors if it doesn't exist
|
||||
await asyncUnlink(logFilePath).catch(() => void 0);
|
||||
}
|
||||
const assertMigratedDocuments = (arr: any[], target: any[]) => target.every((v) => arr.includes(v));
|
||||
|
||||
function sortByTypeAndId(a: { type: string; id: string }, b: { type: string; id: string }) {
|
||||
return a.type.localeCompare(b.type) || a.id.localeCompare(b.id);
|
||||
}
|
||||
|
||||
async function fetchDocuments(esClient: ElasticsearchClient, index: string) {
|
||||
const { body } = await esClient.search<any>({
|
||||
index,
|
||||
body: {
|
||||
query: {
|
||||
match_all: {},
|
||||
},
|
||||
_source: ['type', 'id'],
|
||||
},
|
||||
});
|
||||
|
||||
return body.hits.hits
|
||||
.map((h) => ({
|
||||
...h._source,
|
||||
id: h._id,
|
||||
}))
|
||||
.sort(sortByTypeAndId);
|
||||
}
|
||||
|
||||
describe('migrating from the same Kibana version that used v1 migrations', () => {
|
||||
const originalIndex = `.kibana_1`; // v1 migrations index
|
||||
const migratedIndex = `.kibana_${kibanaVersion}_001`;
|
||||
|
||||
let esServer: kbnTestServer.TestElasticsearchUtils;
|
||||
let root: Root;
|
||||
let coreStart: InternalCoreStart;
|
||||
let esClient: ElasticsearchClient;
|
||||
|
||||
const startServers = async ({ dataArchive, oss }: { dataArchive: string; oss: boolean }) => {
|
||||
const { startES } = kbnTestServer.createTestServers({
|
||||
adjustTimeout: (t: number) => jest.setTimeout(t),
|
||||
settings: {
|
||||
es: {
|
||||
license: 'basic',
|
||||
dataArchive,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
root = kbnTestServer.createRootWithCorePlugins(
|
||||
{
|
||||
migrations: {
|
||||
skip: false,
|
||||
enableV2: true,
|
||||
// There are 40 docs in fixtures. Batch size configured to enforce 3 migration steps.
|
||||
batchSize: 15,
|
||||
},
|
||||
logging: {
|
||||
appenders: {
|
||||
file: {
|
||||
type: 'file',
|
||||
fileName: logFilePath,
|
||||
layout: {
|
||||
type: 'json',
|
||||
},
|
||||
},
|
||||
},
|
||||
loggers: [
|
||||
{
|
||||
name: 'root',
|
||||
appenders: ['file'],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
oss,
|
||||
}
|
||||
);
|
||||
|
||||
const startEsPromise = startES().then((es) => (esServer = es));
|
||||
const startKibanaPromise = root
|
||||
.preboot()
|
||||
.then(() => root.setup())
|
||||
.then(() => root.start())
|
||||
.then((start) => {
|
||||
coreStart = start;
|
||||
esClient = coreStart.elasticsearch.client.asInternalUser;
|
||||
});
|
||||
return await Promise.all([startEsPromise, startKibanaPromise]);
|
||||
};
|
||||
|
||||
const getExpectedVersionPerType = () =>
|
||||
coreStart.savedObjects
|
||||
.getTypeRegistry()
|
||||
.getAllTypes()
|
||||
.reduce((versionMap, type) => {
|
||||
const { name, migrations, convertToMultiNamespaceTypeVersion } = type;
|
||||
if (migrations || convertToMultiNamespaceTypeVersion) {
|
||||
const migrationsMap = typeof migrations === 'function' ? migrations() : migrations;
|
||||
const migrationsKeys = migrationsMap ? Object.keys(migrationsMap) : [];
|
||||
if (convertToMultiNamespaceTypeVersion) {
|
||||
// Setting this option registers a conversion migration that is reflected in the object's `migrationVersions` field
|
||||
migrationsKeys.push(convertToMultiNamespaceTypeVersion);
|
||||
}
|
||||
const highestVersion = migrationsKeys.sort(Semver.compare).reverse()[0];
|
||||
return {
|
||||
...versionMap,
|
||||
[name]: highestVersion,
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
...versionMap,
|
||||
[name]: undefined,
|
||||
};
|
||||
}
|
||||
}, {} as Record<string, string | undefined>);
|
||||
|
||||
const assertMigrationVersion = (
|
||||
doc: SavedObjectsRawDoc,
|
||||
expectedVersions: Record<string, string | undefined>
|
||||
) => {
|
||||
const migrationVersions = doc._source.migrationVersion;
|
||||
const type = doc._source.type;
|
||||
expect(migrationVersions ? migrationVersions[type] : undefined).toEqual(expectedVersions[type]);
|
||||
};
|
||||
|
||||
const stopServers = async () => {
|
||||
if (root) {
|
||||
await root.shutdown();
|
||||
}
|
||||
if (esServer) {
|
||||
await esServer.stop();
|
||||
}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 10000));
|
||||
};
|
||||
|
||||
beforeAll(async () => {
|
||||
await removeLogFile();
|
||||
await startServers({
|
||||
oss: false,
|
||||
dataArchive: Path.join(
|
||||
__dirname,
|
||||
'archives',
|
||||
'8.0.0_v1_migrations_sample_data_saved_objects.zip'
|
||||
),
|
||||
});
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
await stopServers();
|
||||
});
|
||||
|
||||
it('creates the new index and the correct aliases', async () => {
|
||||
const { body } = await esClient.indices.get(
|
||||
{
|
||||
index: migratedIndex,
|
||||
},
|
||||
{ ignore: [404] }
|
||||
);
|
||||
const response = body[migratedIndex];
|
||||
|
||||
expect(response).toBeDefined();
|
||||
expect(Object.keys(response.aliases!).sort()).toEqual(['.kibana', `.kibana_${kibanaVersion}`]);
|
||||
});
|
||||
|
||||
it('copies the documents from the previous index to the new one', async () => {
|
||||
const originalDocs = await fetchDocuments(esClient, originalIndex);
|
||||
const migratedDocs = await fetchDocuments(esClient, migratedIndex);
|
||||
expect(assertMigratedDocuments(migratedDocs, originalDocs));
|
||||
});
|
||||
|
||||
it('migrates the documents to the highest version', async () => {
|
||||
const expectedVersions = getExpectedVersionPerType();
|
||||
const res = await esClient.search({
|
||||
index: migratedIndex,
|
||||
body: {
|
||||
sort: ['_doc'],
|
||||
},
|
||||
size: 10000,
|
||||
});
|
||||
const allDocuments = res.body.hits.hits as SavedObjectsRawDoc[];
|
||||
allDocuments.forEach((doc) => {
|
||||
assertMigrationVersion(doc, expectedVersions);
|
||||
});
|
||||
});
|
||||
});
|
Loading…
Add table
Add a link
Reference in a new issue