mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 09:19:04 -04:00)
chore(NA): move kbn-es-archiver from mocha into jest (#87699)
* chore(NA): move kbn-es-archiver from mocha into jest
* chore(NA): remove mocha temporarily from CI
parent 354a79a280
commit b8d21b1c77

11 changed files with 130 additions and 136 deletions
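Every hunk below applies the same mechanical conversion: the `import expect from '@kbn/expect'` line is dropped (Jest supplies a global `expect`), and each chai-style assertion becomes the equivalent Jest matcher. A minimal, self-contained sketch of that mapping as a runnable Jest test — the values here are illustrative, not taken from any one file:

import Stream, { PassThrough } from 'stream';

describe('@kbn/expect -> Jest matcher mapping (sketch)', () => {
  it('covers the matchers this commit swaps in', () => {
    const streams = [new PassThrough()];
    expect(streams).toBeInstanceOf(Array); // was: to.be.an('array')
    expect(streams.length).toBeGreaterThan(0); // was: to.be.greaterThan(0)
    streams.forEach((s) => expect(s).toBeInstanceOf(Stream)); // was: to.be.a(Stream)

    expect('search').toBe('search'); // was: to.be('search')
    expect({ a: 1 }).toEqual({ a: 1 }); // was: to.eql({ a: 1 })
    expect({ index: 'logstash-*' }).toHaveProperty('index', 'logstash-*'); // was: to.have.property(...)
    expect([1, 2]).toHaveLength(2); // was: to.have.length(2)
    expect('Unexpected number in JSON').toEqual(expect.stringContaining('Unexpected number')); // was: to.contain(...)
    expect('{"forcedError": true}').toMatch(/"forcedError":\s*true/); // was: to.match(...)
    expect(250).not.toBeLessThan(200); // was: to.not.be.lessThan(200)
  });
});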
@@ -20,10 +20,9 @@
 import Stream, { Readable, Writable } from 'stream';
 import { createGunzip } from 'zlib';

-import expect from '@kbn/expect';
 import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';

-import { createFormatArchiveStreams } from '../format';
+import { createFormatArchiveStreams } from './format';

 const INPUTS = [1, 2, { foo: 'bar' }, [1, 2]];
 const INPUT_JSON = INPUTS.map((i) => JSON.stringify(i, null, 2)).join('\n\n');
@@ -32,9 +31,9 @@ describe('esArchiver createFormatArchiveStreams', () => {
   describe('{ gzip: false }', () => {
     it('returns an array of streams', () => {
       const streams = createFormatArchiveStreams({ gzip: false });
-      expect(streams).to.be.an('array');
-      expect(streams.length).to.be.greaterThan(0);
-      streams.forEach((s) => expect(s).to.be.a(Stream));
+      expect(streams).toBeInstanceOf(Array);
+      expect(streams.length).toBeGreaterThan(0);
+      streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
     });

     it('streams consume js values and produces buffers', async () => {
@@ -44,8 +43,8 @@ describe('esArchiver createFormatArchiveStreams', () => {
         createConcatStream([]),
       ] as [Readable, ...Writable[]]);

-      expect(output.length).to.be.greaterThan(0);
-      output.forEach((b) => expect(b).to.be.a(Buffer));
+      expect(output.length).toBeGreaterThan(0);
+      output.forEach((b) => expect(b).toBeInstanceOf(Buffer));
     });

     it('product is pretty-printed JSON separated by two newlines', async () => {
@@ -55,16 +54,16 @@ describe('esArchiver createFormatArchiveStreams', () => {
         createConcatStream(''),
       ] as [Readable, ...Writable[]]);

-      expect(json).to.be(INPUT_JSON);
+      expect(json).toBe(INPUT_JSON);
     });
   });

   describe('{ gzip: true }', () => {
     it('returns an array of streams', () => {
       const streams = createFormatArchiveStreams({ gzip: true });
-      expect(streams).to.be.an('array');
-      expect(streams.length).to.be.greaterThan(0);
-      streams.forEach((s) => expect(s).to.be.a(Stream));
+      expect(streams).toBeInstanceOf(Array);
+      expect(streams.length).toBeGreaterThan(0);
+      streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
     });

     it('streams consume js values and produces buffers', async () => {
@@ -74,8 +73,8 @@ describe('esArchiver createFormatArchiveStreams', () => {
         createConcatStream([]),
       ] as [Readable, ...Writable[]]);

-      expect(output.length).to.be.greaterThan(0);
-      output.forEach((b) => expect(b).to.be.a(Buffer));
+      expect(output.length).toBeGreaterThan(0);
+      output.forEach((b) => expect(b).toBeInstanceOf(Buffer));
     });

     it('output can be gunzipped', async () => {
@@ -85,7 +84,7 @@ describe('esArchiver createFormatArchiveStreams', () => {
         createGunzip(),
         createConcatStream(''),
       ] as [Readable, ...Writable[]]);
-      expect(output).to.be(INPUT_JSON);
+      expect(output).toBe(INPUT_JSON);
     });
   });

@@ -97,7 +96,7 @@ describe('esArchiver createFormatArchiveStreams', () => {
         createConcatStream(''),
       ] as [Readable, ...Writable[]]);

-      expect(json).to.be(INPUT_JSON);
+      expect(json).toBe(INPUT_JSON);
     });
   });
 });
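Note the import change from '../format' to './format': the test now sits next to its source as a colocated `*.test.ts` file instead of living in a sibling `__tests__/` directory, which is how Jest discovers it without the mocha runner. A minimal config sketch for that layout, assuming ts-jest — none of these values are shown in this diff:

// jest.config.ts — illustrative only; the repo's real Jest config is not part of this commit view
import type { Config } from '@jest/types';

const config: Config.InitialOptions = {
  preset: 'ts-jest',
  testEnvironment: 'node',
  // pick up colocated tests such as src/lib/archives/format.test.ts
  testMatch: ['**/*.test.ts'],
};

export default config;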
@@ -20,18 +20,17 @@
 import Stream, { PassThrough, Readable, Writable, Transform } from 'stream';
 import { createGzip } from 'zlib';

-import expect from '@kbn/expect';
 import { createConcatStream, createListStream, createPromiseFromStreams } from '@kbn/utils';

-import { createParseArchiveStreams } from '../parse';
+import { createParseArchiveStreams } from './parse';

 describe('esArchiver createParseArchiveStreams', () => {
   describe('{ gzip: false }', () => {
     it('returns an array of streams', () => {
       const streams = createParseArchiveStreams({ gzip: false });
-      expect(streams).to.be.an('array');
-      expect(streams.length).to.be.greaterThan(0);
-      streams.forEach((s) => expect(s).to.be.a(Stream));
+      expect(streams).toBeInstanceOf(Array);
+      expect(streams.length).toBeGreaterThan(0);
+      streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
     });

     describe('streams', () => {
@@ -46,7 +45,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         ...createParseArchiveStreams({ gzip: false }),
       ]);

-      expect(output).to.eql({ a: 1 });
+      expect(output).toEqual({ a: 1 });
     });
     it('consume buffers of valid JSON separated by two newlines', async () => {
       const output = await createPromiseFromStreams([
@@ -63,7 +62,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         createConcatStream([]),
       ] as [Readable, ...Writable[]]);

-      expect(output).to.eql([{ a: 1 }, 1]);
+      expect(output).toEqual([{ a: 1 }, 1]);
     });

     it('provides each JSON object as soon as it is parsed', async () => {
@@ -87,10 +86,10 @@ describe('esArchiver createParseArchiveStreams', () => {
       ] as [Readable, ...Writable[]]);

       input.write(Buffer.from('{"a": 1}\n\n{"a":'));
-      expect(await receivedPromise).to.eql({ a: 1 });
+      expect(await receivedPromise).toEqual({ a: 1 });
       input.write(Buffer.from('2}'));
       input.end();
-      expect(await finalPromise).to.eql([{ a: 1 }, { a: 2 }]);
+      expect(await finalPromise).toEqual([{ a: 1 }, { a: 2 }]);
     });
   });

@@ -108,7 +107,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         ] as [Readable, ...Writable[]]);
         throw new Error('should have failed');
       } catch (err) {
-        expect(err.message).to.contain('Unexpected number');
+        expect(err.message).toEqual(expect.stringContaining('Unexpected number'));
       }
     });
   });
@@ -117,9 +116,9 @@ describe('esArchiver createParseArchiveStreams', () => {
   describe('{ gzip: true }', () => {
     it('returns an array of streams', () => {
       const streams = createParseArchiveStreams({ gzip: true });
-      expect(streams).to.be.an('array');
-      expect(streams.length).to.be.greaterThan(0);
-      streams.forEach((s) => expect(s).to.be.a(Stream));
+      expect(streams).toBeInstanceOf(Array);
+      expect(streams.length).toBeGreaterThan(0);
+      streams.forEach((s) => expect(s).toBeInstanceOf(Stream));
     });

     describe('streams', () => {
@@ -135,7 +134,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         ...createParseArchiveStreams({ gzip: true }),
       ]);

-      expect(output).to.eql({ a: 1 });
+      expect(output).toEqual({ a: 1 });
     });

     it('parses valid gzipped JSON strings separated by two newlines', async () => {
@@ -146,7 +145,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         createConcatStream([]),
       ] as [Readable, ...Writable[]]);

-      expect(output).to.eql([{ a: 1 }, { a: 2 }]);
+      expect(output).toEqual([{ a: 1 }, { a: 2 }]);
     });
   });

@@ -158,7 +157,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         createConcatStream([]),
       ] as [Readable, ...Writable[]]);

-      expect(output).to.eql([]);
+      expect(output).toEqual([]);
     });

     describe('stream errors', () => {
@@ -171,7 +170,7 @@ describe('esArchiver createParseArchiveStreams', () => {
         ] as [Readable, ...Writable[]]);
         throw new Error('should have failed');
       } catch (err) {
-        expect(err.message).to.contain('incorrect header check');
+        expect(err.message).toEqual(expect.stringContaining('incorrect header check'));
       }
     });
   });
@@ -183,7 +182,7 @@ describe('esArchiver createParseArchiveStreams', () => {
       createListStream([Buffer.from('{"a": 1}')]),
       ...createParseArchiveStreams(),
     ]);
-    expect(output).to.eql({ a: 1 });
+    expect(output).toEqual({ a: 1 });
   });
  });
});
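One detail worth noticing in the error-path tests above: `to.contain(...)` on an error message becomes `toEqual(expect.stringContaining(...))` rather than the shorter `toContain(...)`. Both spellings work on plain strings in Jest; a tiny sketch with an illustrative message:

it('two equivalent Jest spellings for string containment', () => {
  const message = 'Unexpected number in JSON at position 12'; // illustrative value
  expect(message).toEqual(expect.stringContaining('Unexpected number'));
  expect(message).toContain('Unexpected number'); // equivalent for plain strings
});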
@@ -18,22 +18,21 @@
  */

 import sinon from 'sinon';
-import expect from '@kbn/expect';
 import { delay } from 'bluebird';
 import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';

-import { createGenerateDocRecordsStream } from '../generate_doc_records_stream';
-import { Progress } from '../../progress';
-import { createStubStats, createStubClient } from './stubs';
+import { createGenerateDocRecordsStream } from './generate_doc_records_stream';
+import { Progress } from '../progress';
+import { createStubStats, createStubClient } from './__mocks__/stubs';

 describe('esArchiver: createGenerateDocRecordsStream()', () => {
   it('scolls 1000 documents at a time', async () => {
     const stats = createStubStats();
     const client = createStubClient([
       (name, params) => {
-        expect(name).to.be('search');
-        expect(params).to.have.property('index', 'logstash-*');
-        expect(params).to.have.property('size', 1000);
+        expect(name).toBe('search');
+        expect(params).toHaveProperty('index', 'logstash-*');
+        expect(params).toHaveProperty('size', 1000);
         return {
           hits: {
             total: 0,
@@ -49,18 +48,18 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
       createGenerateDocRecordsStream({ client, stats, progress }),
     ]);

-    expect(progress.getTotal()).to.be(0);
-    expect(progress.getComplete()).to.be(0);
+    expect(progress.getTotal()).toBe(0);
+    expect(progress.getComplete()).toBe(0);
   });

   it('uses a 1 minute scroll timeout', async () => {
     const stats = createStubStats();
     const client = createStubClient([
       (name, params) => {
-        expect(name).to.be('search');
-        expect(params).to.have.property('index', 'logstash-*');
-        expect(params).to.have.property('scroll', '1m');
-        expect(params).to.have.property('rest_total_hits_as_int', true);
+        expect(name).toBe('search');
+        expect(params).toHaveProperty('index', 'logstash-*');
+        expect(params).toHaveProperty('scroll', '1m');
+        expect(params).toHaveProperty('rest_total_hits_as_int', true);
         return {
           hits: {
             total: 0,
@@ -76,8 +75,8 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
       createGenerateDocRecordsStream({ client, stats, progress }),
     ]);

-    expect(progress.getTotal()).to.be(0);
-    expect(progress.getComplete()).to.be(0);
+    expect(progress.getTotal()).toBe(0);
+    expect(progress.getComplete()).toBe(0);
   });

   it('consumes index names and scrolls completely before continuing', async () => {
@@ -85,8 +84,8 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
     let checkpoint = Date.now();
     const client = createStubClient([
       async (name, params) => {
-        expect(name).to.be('search');
-        expect(params).to.have.property('index', 'index1');
+        expect(name).toBe('search');
+        expect(params).toHaveProperty('index', 'index1');
         await delay(200);
         return {
           _scroll_id: 'index1ScrollId',
@@ -94,17 +93,17 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
         };
       },
       async (name, params) => {
-        expect(name).to.be('scroll');
-        expect(params).to.have.property('scrollId', 'index1ScrollId');
-        expect(Date.now() - checkpoint).to.not.be.lessThan(200);
+        expect(name).toBe('scroll');
+        expect(params).toHaveProperty('scrollId', 'index1ScrollId');
+        expect(Date.now() - checkpoint).not.toBeLessThan(200);
         checkpoint = Date.now();
         await delay(200);
         return { hits: { total: 2, hits: [{ _id: 2, _index: 'foo' }] } };
       },
       async (name, params) => {
-        expect(name).to.be('search');
-        expect(params).to.have.property('index', 'index2');
-        expect(Date.now() - checkpoint).to.not.be.lessThan(200);
+        expect(name).toBe('search');
+        expect(params).toHaveProperty('index', 'index2');
+        expect(Date.now() - checkpoint).not.toBeLessThan(200);
         checkpoint = Date.now();
         await delay(200);
         return { hits: { total: 0, hits: [] } };
@@ -118,7 +117,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
       createConcatStream([]),
     ]);

-    expect(docRecords).to.eql([
+    expect(docRecords).toEqual([
      {
        type: 'doc',
        value: {
@@ -139,7 +138,7 @@ describe('esArchiver: createGenerateDocRecordsStream()', () => {
      },
    ]);
    sinon.assert.calledTwice(stats.archivedDoc as any);
-    expect(progress.getTotal()).to.be(2);
-    expect(progress.getComplete()).to.be(2);
+    expect(progress.getTotal()).toBe(2);
+    expect(progress.getComplete()).toBe(2);
   });
 });

@@ -17,13 +17,12 @@
  * under the License.
  */

-import expect from '@kbn/expect';
 import { delay } from 'bluebird';
 import { createListStream, createPromiseFromStreams } from '@kbn/utils';

-import { Progress } from '../../progress';
-import { createIndexDocRecordsStream } from '../index_doc_records_stream';
-import { createStubStats, createStubClient, createPersonDocRecords } from './stubs';
+import { Progress } from '../progress';
+import { createIndexDocRecordsStream } from './index_doc_records_stream';
+import { createStubStats, createStubClient, createPersonDocRecords } from './__mocks__/stubs';

 const recordsToBulkBody = (records: any[]) => {
   return records.reduce((acc, record) => {
@@ -38,8 +37,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
     const records = createPersonDocRecords(1);
     const client = createStubClient([
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params).to.eql({
+        expect(name).toBe('bulk');
+        expect(params).toEqual({
           body: recordsToBulkBody(records),
           requestTimeout: 120000,
         });
@@ -55,24 +54,24 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
     ]);

     client.assertNoPendingResponses();
-    expect(progress.getComplete()).to.be(1);
-    expect(progress.getTotal()).to.be(undefined);
+    expect(progress.getComplete()).toBe(1);
+    expect(progress.getTotal()).toBe(undefined);
   });

   it('consumes multiple doc records and sends to `_bulk` api together', async () => {
     const records = createPersonDocRecords(10);
     const client = createStubClient([
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params).to.eql({
+        expect(name).toBe('bulk');
+        expect(params).toEqual({
           body: recordsToBulkBody(records.slice(0, 1)),
           requestTimeout: 120000,
         });
         return { ok: true };
       },
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params).to.eql({
+        expect(name).toBe('bulk');
+        expect(params).toEqual({
           body: recordsToBulkBody(records.slice(1)),
           requestTimeout: 120000,
         });
|
@ -88,8 +87,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
|
|||
]);
|
||||
|
||||
client.assertNoPendingResponses();
|
||||
expect(progress.getComplete()).to.be(10);
|
||||
expect(progress.getTotal()).to.be(undefined);
|
||||
expect(progress.getComplete()).toBe(10);
|
||||
expect(progress.getTotal()).toBe(undefined);
|
||||
});
|
||||
|
||||
it('waits until request is complete before sending more', async () => {
|
||||
|
@@ -99,8 +98,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
     const delayMs = 1234;
     const client = createStubClient([
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params).to.eql({
+        expect(name).toBe('bulk');
+        expect(params).toEqual({
           body: recordsToBulkBody(records.slice(0, 1)),
           requestTimeout: 120000,
         });
@@ -108,12 +107,12 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
         return { ok: true };
       },
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params).to.eql({
+        expect(name).toBe('bulk');
+        expect(params).toEqual({
           body: recordsToBulkBody(records.slice(1)),
           requestTimeout: 120000,
         });
-        expect(Date.now() - start).to.not.be.lessThan(delayMs);
+        expect(Date.now() - start).not.toBeLessThan(delayMs);
         return { ok: true };
       },
     ]);
@@ -125,8 +124,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
     ]);

     client.assertNoPendingResponses();
-    expect(progress.getComplete()).to.be(10);
-    expect(progress.getTotal()).to.be(undefined);
+    expect(progress.getComplete()).toBe(10);
+    expect(progress.getTotal()).toBe(undefined);
   });

   it('sends a maximum of 300 documents at a time', async () => {
@@ -134,18 +133,18 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
     const stats = createStubStats();
     const client = createStubClient([
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params.body.length).to.eql(1 * 2);
+        expect(name).toBe('bulk');
+        expect(params.body.length).toEqual(1 * 2);
         return { ok: true };
       },
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params.body.length).to.eql(299 * 2);
+        expect(name).toBe('bulk');
+        expect(params.body.length).toEqual(299 * 2);
         return { ok: true };
       },
       async (name, params) => {
-        expect(name).to.be('bulk');
-        expect(params.body.length).to.eql(1 * 2);
+        expect(name).toBe('bulk');
+        expect(params.body.length).toEqual(1 * 2);
         return { ok: true };
       },
     ]);
@@ -157,8 +156,8 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
     ]);

     client.assertNoPendingResponses();
-    expect(progress.getComplete()).to.be(301);
-    expect(progress.getTotal()).to.be(undefined);
+    expect(progress.getComplete()).toBe(301);
+    expect(progress.getTotal()).toBe(undefined);
   });

   it('emits an error if any request fails', async () => {
@@ -177,11 +176,11 @@ describe('esArchiver: createIndexDocRecordsStream()', () => {
       ]);
       throw new Error('expected stream to emit error');
     } catch (err) {
-      expect(err.message).to.match(/"forcedError":\s*true/);
+      expect(err.message).toMatch(/"forcedError":\s*true/);
     }

     client.assertNoPendingResponses();
-    expect(progress.getComplete()).to.be(1);
-    expect(progress.getTotal()).to.be(undefined);
+    expect(progress.getComplete()).toBe(1);
+    expect(progress.getTotal()).toBe(undefined);
   });
 });

@@ -17,12 +17,11 @@
  * under the License.
  */

-import expect from '@kbn/expect';
 import sinon from 'sinon';
 import Chance from 'chance';
 import { createPromiseFromStreams, createConcatStream, createListStream } from '@kbn/utils';

-import { createCreateIndexStream } from '../create_index_stream';
+import { createCreateIndexStream } from './create_index_stream';

 import {
   createStubStats,
@@ -30,7 +29,7 @@ import {
   createStubDocRecord,
   createStubClient,
   createStubLogger,
-} from './stubs';
+} from './__mocks__/stubs';

 const chance = new Chance();

@@ -49,7 +48,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
       createCreateIndexStream({ client, stats, log }),
     ]);

-    expect(stats.getTestSummary()).to.eql({
+    expect(stats.getTestSummary()).toEqual({
       deletedIndex: 1,
       createdIndex: 2,
     });
@@ -68,13 +67,13 @@ describe('esArchiver: createCreateIndexStream()', () => {
       createCreateIndexStream({ client, stats, log }),
     ]);

-    expect((client.indices.getAlias as sinon.SinonSpy).calledOnce).to.be.ok();
-    expect((client.indices.getAlias as sinon.SinonSpy).args[0][0]).to.eql({
+    expect((client.indices.getAlias as sinon.SinonSpy).calledOnce).toBe(true);
+    expect((client.indices.getAlias as sinon.SinonSpy).args[0][0]).toEqual({
       name: 'existing-index',
       ignore: [404],
     });
-    expect((client.indices.delete as sinon.SinonSpy).calledOnce).to.be.ok();
-    expect((client.indices.delete as sinon.SinonSpy).args[0][0]).to.eql({
+    expect((client.indices.delete as sinon.SinonSpy).calledOnce).toBe(true);
+    expect((client.indices.delete as sinon.SinonSpy).args[0][0]).toEqual({
       index: ['actual-index'],
     });
     sinon.assert.callCount(client.indices.create as sinon.SinonSpy, 3); // one failed create because of existing
@@ -93,7 +92,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
       createConcatStream([]),
     ]);

-    expect(output).to.eql([createStubDocRecord('index', 1), createStubDocRecord('index', 2)]);
+    expect(output).toEqual([createStubDocRecord('index', 1), createStubDocRecord('index', 2)]);
   });

   it('creates aliases', async () => {
@@ -133,7 +132,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
       createConcatStream([]),
     ]);

-    expect(output).to.eql(randoms);
+    expect(output).toEqual(randoms);
   });

   it('passes through non-record values', async () => {
@@ -147,7 +146,7 @@ describe('esArchiver: createCreateIndexStream()', () => {
       createConcatStream([]),
     ]);

-    expect(output).to.eql(nonRecordValues);
+    expect(output).toEqual(nonRecordValues);
   });
 });

@@ -169,13 +168,13 @@ describe('esArchiver: createCreateIndexStream()', () => {
       }),
     ]);

-    expect(stats.getTestSummary()).to.eql({
+    expect(stats.getTestSummary()).toEqual({
       skippedIndex: 1,
       createdIndex: 1,
     });
     sinon.assert.callCount(client.indices.delete as sinon.SinonSpy, 0);
     sinon.assert.callCount(client.indices.create as sinon.SinonSpy, 2); // one failed create because of existing
-    expect((client.indices.create as sinon.SinonSpy).args[0][0]).to.have.property(
+    expect((client.indices.create as sinon.SinonSpy).args[0][0]).toHaveProperty(
       'index',
       'new-index'
     );
@@ -203,15 +202,15 @@ describe('esArchiver: createCreateIndexStream()', () => {
       createConcatStream([]),
     ]);

-    expect(stats.getTestSummary()).to.eql({
+    expect(stats.getTestSummary()).toEqual({
       skippedIndex: 1,
       createdIndex: 1,
     });
     sinon.assert.callCount(client.indices.delete as sinon.SinonSpy, 0);
     sinon.assert.callCount(client.indices.create as sinon.SinonSpy, 2); // one failed create because of existing

-    expect(output).to.have.length(2);
-    expect(output).to.eql([
+    expect(output).toHaveLength(2);
+    expect(output).toEqual([
       createStubDocRecord('new-index', 1),
       createStubDocRecord('new-index', 2),
     ]);

@@ -21,14 +21,14 @@ import sinon from 'sinon';

 import { createListStream, createPromiseFromStreams } from '@kbn/utils';

-import { createDeleteIndexStream } from '../delete_index_stream';
+import { createDeleteIndexStream } from './delete_index_stream';

 import {
   createStubStats,
   createStubClient,
   createStubIndexRecord,
   createStubLogger,
-} from './stubs';
+} from './__mocks__/stubs';

 const log = createStubLogger();

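Across these files the shared test doubles move from `./stubs` into `./__mocks__/stubs`. `__mocks__` is Jest's conventional directory for manual mocks; here the stubs are imported directly rather than activated through `jest.mock()`, and the surviving `sinon.assert` calls show they are still sinon spies. A hypothetical sketch of that shape — the real `createStubStats` implementation is not visible in this diff:

// __mocks__/stubs.ts (hypothetical shape, for illustration only)
import sinon from 'sinon';

export const createStubStats = () => ({
  archivedDoc: sinon.spy(), // asserted later via sinon.assert.calledTwice(...)
  getTestSummary: sinon.stub().returns({}),
});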
@@ -18,12 +18,11 @@
  */

 import sinon from 'sinon';
-import expect from '@kbn/expect';
 import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';

-import { createStubClient, createStubStats } from './stubs';
+import { createStubClient, createStubStats } from './__mocks__/stubs';

-import { createGenerateIndexRecordsStream } from '../generate_index_records_stream';
+import { createGenerateIndexRecordsStream } from './generate_index_records_stream';

 describe('esArchiver: createGenerateIndexRecordsStream()', () => {
   it('consumes index names and queries for the mapping of each', async () => {
@@ -36,7 +35,7 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
       createGenerateIndexRecordsStream(client, stats),
     ]);

-    expect(stats.getTestSummary()).to.eql({
+    expect(stats.getTestSummary()).toEqual({
       archivedIndex: 4,
     });

@@ -56,12 +55,12 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
     ]);

     const params = (client.indices.get as sinon.SinonSpy).args[0][0];
-    expect(params).to.have.property('filterPath');
+    expect(params).toHaveProperty('filterPath');
     const filters: string[] = params.filterPath;
-    expect(filters.some((path) => path.includes('index.creation_date'))).to.be(true);
-    expect(filters.some((path) => path.includes('index.uuid'))).to.be(true);
-    expect(filters.some((path) => path.includes('index.version'))).to.be(true);
-    expect(filters.some((path) => path.includes('index.provided_name'))).to.be(true);
+    expect(filters.some((path) => path.includes('index.creation_date'))).toBe(true);
+    expect(filters.some((path) => path.includes('index.uuid'))).toBe(true);
+    expect(filters.some((path) => path.includes('index.version'))).toBe(true);
+    expect(filters.some((path) => path.includes('index.provided_name'))).toBe(true);
   });

   it('produces one index record for each index name it receives', async () => {
@@ -74,19 +73,19 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
       createConcatStream([]),
     ]);

-    expect(indexRecords).to.have.length(3);
+    expect(indexRecords).toHaveLength(3);

-    expect(indexRecords[0]).to.have.property('type', 'index');
-    expect(indexRecords[0]).to.have.property('value');
-    expect(indexRecords[0].value).to.have.property('index', 'index1');
+    expect(indexRecords[0]).toHaveProperty('type', 'index');
+    expect(indexRecords[0]).toHaveProperty('value');
+    expect(indexRecords[0].value).toHaveProperty('index', 'index1');

-    expect(indexRecords[1]).to.have.property('type', 'index');
-    expect(indexRecords[1]).to.have.property('value');
-    expect(indexRecords[1].value).to.have.property('index', 'index2');
+    expect(indexRecords[1]).toHaveProperty('type', 'index');
+    expect(indexRecords[1]).toHaveProperty('value');
+    expect(indexRecords[1].value).toHaveProperty('index', 'index2');

-    expect(indexRecords[2]).to.have.property('type', 'index');
-    expect(indexRecords[2]).to.have.property('value');
-    expect(indexRecords[2].value).to.have.property('index', 'index3');
+    expect(indexRecords[2]).toHaveProperty('type', 'index');
+    expect(indexRecords[2]).toHaveProperty('value');
+    expect(indexRecords[2].value).toHaveProperty('index', 'index3');
   });

   it('understands aliases', async () => {
@@ -99,7 +98,7 @@ describe('esArchiver: createGenerateIndexRecordsStream()', () => {
       createConcatStream([]),
     ]);

-    expect(indexRecords).to.eql([
+    expect(indexRecords).toEqual([
       {
         type: 'index',
         value: {

@@ -18,11 +18,10 @@
  */

 import Chance from 'chance';
-import expect from '@kbn/expect';

 import { createListStream, createPromiseFromStreams, createConcatStream } from '@kbn/utils';

-import { createFilterRecordsStream } from '../filter_records_stream';
+import { createFilterRecordsStream } from './filter_records_stream';

 const chance = new Chance();

@@ -42,7 +41,7 @@ describe('esArchiver: createFilterRecordsStream()', () => {
       createConcatStream([]),
     ]);

-    expect(output).to.eql([]);
+    expect(output).toEqual([]);
   });

   it('produces record values that have a matching type', async () => {
@@ -61,7 +60,7 @@ describe('esArchiver: createFilterRecordsStream()', () => {
       createConcatStream([]),
     ]);

-    expect(output).to.have.length(3);
-    expect(output.map((o) => o.type)).to.eql([type1, type1, type1]);
+    expect(output).toHaveLength(3);
+    expect(output.map((o) => o.type)).toEqual([type1, type1, type1]);
   });
 });

@@ -2,5 +2,6 @@

 source src/dev/ci_setup/setup_env.sh

-checks-reporter-with-killswitch "Mocha Tests" \
-  node scripts/mocha
+# TODO: will remove mocha in another PR
+# checks-reporter-with-killswitch "Mocha Tests" \
+#  node scripts/mocha
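With the mocha step commented out of CI, the converted suites are expected to run under Kibana's Jest entry point instead. A plausible invocation — the exact path argument is an assumption, since this view does not show where the suites live:

# assumption: run the converted es_archiver tests through Kibana's Jest runner
node scripts/jest src/es_archiver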