Mirror of https://github.com/elastic/kibana.git (synced 2025-04-23 09:19:04 -04:00)

Co-authored-by: spalger <spalger@users.noreply.github.com>
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
Co-authored-by: Spencer <email@spalger.com>

commit a26bed78b0 (parent c8bf9de3da)
17 changed files with 963 additions and 14 deletions
@@ -12,7 +12,17 @@ kibanaPipeline(timeoutMinutes: 120) {
   ]) {
     parallel([
       'oss-baseline': {
-        workers.ci(name: 'oss-baseline', size: 'l', ramDisk: true, runErrorReporter: false) {
+        workers.ci(name: 'oss-baseline', size: 'l', ramDisk: true, runErrorReporter: false, bootstrapped: false) {
+          // bootstrap ourselves, but with the env needed to upload the ts refs cache
+          withGcpServiceAccount.fromVaultSecret('secret/kibana-issues/dev/ci-artifacts-key', 'value') {
+            withEnv([
+              'BUILD_TS_REFS_CACHE_ENABLE=true',
+              'BUILD_TS_REFS_CACHE_CAPTURE=true'
+            ]) {
+              kibanaPipeline.doSetup()
+            }
+          }
+
           kibanaPipeline.functionalTestProcess('oss-baseline', './test/scripts/jenkins_baseline.sh')()
         }
       },
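The two environment variables exported above are what flip the caching behavior of `node scripts/build_ts_refs` during setup; the flag handling itself appears in the `build_ts_refs_cli.ts` hunk further down. A minimal sketch of that relationship (illustrative only, the variable names and log messages here are not from the source):

```ts
// Sketch: how the Jenkinsfile env vars are consumed by the build_ts_refs CLI
// (paraphrasing the build_ts_refs_cli.ts changes later in this diff).
const cacheEnabled = process.env.BUILD_TS_REFS_CACHE_ENABLE === 'true';
const captureEnabled = process.env.BUILD_TS_REFS_CACHE_CAPTURE === 'true';

console.log(
  cacheEnabled
    ? 'ts refs output cache will be downloaded/extracted before tsc runs'
    : 'ts refs output cache disabled; tsc builds all project refs from scratch'
);

if (cacheEnabled && captureEnabled) {
  // in CI this archive is then uploaded by ci_setup.sh (see that hunk below)
  console.log('after the build, a {sha}.zip archive is written to target/ts_refs_cache');
}
```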
@@ -7,14 +7,14 @@
  */

 import path from 'path';
-import { kibanaPackageJSON } from './';
+import { kibanaPackageJson } from './';

 it('parses package.json', () => {
-  expect(kibanaPackageJSON.name).toEqual('kibana');
+  expect(kibanaPackageJson.name).toEqual('kibana');
 });

 it('includes __dirname and __filename', () => {
   const root = path.resolve(__dirname, '../../../../');
-  expect(kibanaPackageJSON.__filename).toEqual(path.resolve(root, 'package.json'));
-  expect(kibanaPackageJSON.__dirname).toEqual(root);
+  expect(kibanaPackageJson.__filename).toEqual(path.resolve(root, 'package.json'));
+  expect(kibanaPackageJson.__dirname).toEqual(root);
 });
@@ -9,7 +9,7 @@
 import { dirname, resolve } from 'path';
 import { REPO_ROOT } from '../repo_root';

-export const kibanaPackageJSON = {
+export const kibanaPackageJson = {
   __filename: resolve(REPO_ROOT, 'package.json'),
   __dirname: dirname(resolve(REPO_ROOT, 'package.json')),
   ...require(resolve(REPO_ROOT, 'package.json')),
@@ -6,7 +6,7 @@
  * Side Public License, v 1.
  */

-import { kibanaPackageJSON as pkg } from '@kbn/dev-utils';
+import { kibanaPackageJson as pkg } from '@kbn/dev-utils';

 import { getVersionInfo } from '../version_info';

@@ -25,7 +25,16 @@ echo "build --remote_header=x-buildbuddy-api-key=$KIBANA_BUILDBUDDY_CI_API_KEY"
 ### install dependencies
 ###
 echo " -- installing node.js dependencies"
-yarn kbn bootstrap
+yarn kbn bootstrap --verbose
+
+###
+### upload ts-refs-cache artifacts as quickly as possible so they are available for download
+###
+if [[ "$BUILD_TS_REFS_CACHE_CAPTURE" == "true" ]]; then
+  cd "$KIBANA_DIR/target/ts_refs_cache"
+  gsutil cp "*.zip" 'gs://kibana-ci-ts-refs-cache/'
+  cd "$KIBANA_DIR"
+fi

 ###
 ### Download es snapshots
@@ -6,28 +6,58 @@
  * Side Public License, v 1.
  */

-import { run } from '@kbn/dev-utils';
+import Path from 'path';
+
+import { run, REPO_ROOT } from '@kbn/dev-utils';
 import del from 'del';

+import { RefOutputCache } from './ref_output_cache';
 import { buildAllTsRefs, REF_CONFIG_PATHS } from './build_ts_refs';
 import { getOutputsDeep } from './ts_configfile';
 import { concurrentMap } from './concurrent_map';

+const CACHE_WORKING_DIR = Path.resolve(REPO_ROOT, 'data/ts_refs_output_cache');
+
 export async function runBuildRefsCli() {
   run(
     async ({ log, flags }) => {
+      const outDirs = getOutputsDeep(REF_CONFIG_PATHS);
+
       if (flags.clean) {
-        const outDirs = getOutputsDeep(REF_CONFIG_PATHS);
         log.info('deleting', outDirs.length, 'ts output directories');
         await concurrentMap(100, outDirs, (outDir) => del(outDir));
       }

+      let outputCache;
+      if (flags.cache) {
+        outputCache = await RefOutputCache.create({
+          log,
+          outDirs,
+          repoRoot: REPO_ROOT,
+          workingDir: CACHE_WORKING_DIR,
+          upstreamUrl: 'https://github.com/elastic/kibana.git',
+        });
+
+        await outputCache.initCaches();
+      }
+
       await buildAllTsRefs(log);
+
+      if (outputCache) {
+        if (process.env.BUILD_TS_REFS_CACHE_CAPTURE === 'true') {
+          await outputCache.captureCache(Path.resolve(REPO_ROOT, 'target/ts_refs_cache'));
+        }
+
+        await outputCache.cleanup();
+      }
     },
     {
       description: 'Build TypeScript projects',
       flags: {
-        boolean: ['clean'],
+        boolean: ['clean', 'cache'],
+        default: {
+          cache: process.env.BUILD_TS_REFS_CACHE_ENABLE === 'true' ? true : false,
+        },
       },
       log: {
         defaultLevel: 'debug',
New file: src/dev/typescript/ref_output_cache/README.md (17 lines)
@@ -0,0 +1,17 @@
# `node scripts/build_ts_refs` output cache

This module implements the logic for caching the output of building the ts refs and extracting those caches into the source repo to speed up the execution of this script. We've implemented this as a stop-gap solution while we migrate to Bazel, which will handle caching the types produced by the scripts independently and speed things up incredibly, but in the meantime we need something to fix the 10-minute bootstrap times we're seeing.

How it works:

 1. traverse the TS projects referenced from `tsconfig.refs.json` and collect their `compilerOptions.outDir` setting.
 2. determine the `upstreamBranch` by reading the `branch` property out of `package.json`
 3. fetch the latest changes from `https://github.com/elastic/kibana.git` for that branch
 4. determine the merge base between `HEAD` and the latest ref from the `upstreamBranch`
 5. check in the `data/ts_refs_output_cache/archives` dir (where we keep the 10 most recent downloads) and at `https://ts-refs-cache.kibana.dev/{sha}.zip` for the cache of the merge base commit, and up to 5 commits before that in the log, stopping once we find one that is available locally or was downloaded.
 6. check for the `.ts-ref-cache-merge-base` file in each `outDir`, which records the `mergeBase` that was used to initialize that `outDir`; if the file exists and matches the `sha` we plan to use for our cache, exclude that `outDir` from getting initialized with the cache data
 7. for each `outDir` that either hasn't been initialized with cache data or was initialized with cache data from another merge base, delete the `outDir` and replace it with the copy stored in the downloaded cache
    1. if there isn't a cached version of that `outDir`, replace it with an empty directory
 8. write the current `mergeBase` to the `.ts-ref-cache-merge-base` file in each `outDir`
 9. run `tsc`, which will only build things which have changed since the cache was created
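The lookup described in steps 4 and 5 amounts to walking a short list of candidate shas and taking the first one with a usable archive. A minimal sketch of that walk, illustrative only: `pickCacheSha` and its parameters are hypothetical names, and the real implementation is `Archives.getFirstAvailable()` in `archives.ts` below.

```ts
// Illustrative sketch of steps 4-5 above, not the real implementation (see archives.ts below).
// candidateShas: the merge base first, then up to 5 commits before it.
async function pickCacheSha(
  candidateShas: string[],
  locallyCachedShas: Set<string>
): Promise<string | undefined> {
  for (const sha of candidateShas) {
    if (locallyCachedShas.has(sha)) {
      // already downloaded into data/ts_refs_output_cache/archives
      return sha;
    }

    // otherwise check whether the cache service has an archive for this sha
    const resp = await fetch(`https://ts-refs-cache.kibana.dev/${sha}.zip`, { method: 'HEAD' });
    if (resp.ok) {
      return sha;
    }
    // no archive for this sha, fall back to the next (older) candidate
  }

  return undefined; // no cache available; tsc builds everything from scratch
}
```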
New file: src/dev/typescript/ref_output_cache/archives.ts (186 lines)
@@ -0,0 +1,186 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Fs from 'fs/promises';
import { createWriteStream } from 'fs';
import Path from 'path';
import { promisify } from 'util';
import { pipeline } from 'stream';

import { ToolingLog } from '@kbn/dev-utils';
import Axios from 'axios';
import del from 'del';

// https://github.com/axios/axios/tree/ffea03453f77a8176c51554d5f6c3c6829294649/lib/adapters
// @ts-expect-error untyped internal module used to prevent axios from using xhr adapter in tests
import AxiosHttpAdapter from 'axios/lib/adapters/http';

interface Archive {
  sha: string;
  path: string;
  time: number;
}

const asyncPipeline = promisify(pipeline);

async function getCacheNames(cacheDir: string) {
  try {
    return await Fs.readdir(cacheDir);
  } catch (error) {
    if (error.code === 'ENOENT') {
      return [];
    }

    throw error;
  }
}

export class Archives {
  static async create(log: ToolingLog, workingDir: string) {
    const dir = Path.resolve(workingDir, 'archives');
    const bySha = new Map<string, Archive>();

    for (const name of await getCacheNames(dir)) {
      const path = Path.resolve(dir, name);

      if (!name.endsWith('.zip')) {
        log.debug('deleting unexpected file in archives dir', path);
        await Fs.unlink(path);
        continue;
      }

      const sha = name.replace('.zip', '');
      log.verbose('identified archive for', sha);
      const s = await Fs.stat(path);
      const time = Math.max(s.atimeMs, s.mtimeMs);
      bySha.set(sha, {
        path,
        time,
        sha,
      });
    }

    return new Archives(log, workingDir, bySha);
  }

  protected constructor(
    private readonly log: ToolingLog,
    private readonly workDir: string,
    private readonly bySha: Map<string, Archive>
  ) {}

  size() {
    return this.bySha.size;
  }

  get(sha: string) {
    return this.bySha.get(sha);
  }

  async delete(sha: string) {
    const archive = this.get(sha);
    if (archive) {
      await Fs.unlink(archive.path);
      this.bySha.delete(sha);
    }
  }

  *[Symbol.iterator]() {
    yield* this.bySha.values();
  }

  /**
   * Attempt to download the cache for a given sha, adding it to this.bySha
   * and returning true if successful, logging and returning false otherwise.
   *
   * @param sha the commit sha we should try to download the cache for
   */
  async attemptToDownload(sha: string) {
    if (this.bySha.has(sha)) {
      return true;
    }

    const url = `https://ts-refs-cache.kibana.dev/${sha}.zip`;
    this.log.debug('attempting to download cache for', sha, 'from', url);

    const filename = `${sha}.zip`;
    const target = Path.resolve(this.workDir, 'archives', `${filename}`);
    const tmpTarget = `${target}.tmp`;

    try {
      const resp = await Axios.request({
        url,
        responseType: 'stream',
        adapter: AxiosHttpAdapter,
      });

      await Fs.mkdir(Path.dirname(target), { recursive: true });
      await asyncPipeline(resp.data, createWriteStream(tmpTarget));
      this.log.debug('download complete, renaming tmp');

      await Fs.rename(tmpTarget, target);
      this.bySha.set(sha, {
        sha,
        path: target,
        time: Date.now(),
      });

      this.log.debug('download of cache for', sha, 'complete');
      return true;
    } catch (error) {
      await del(tmpTarget, { force: true });

      if (!error.response) {
        this.log.debug(`failed to download cache, ignoring error:`, error.message);
        return false;
      }

      if (error.response.status === 404) {
        return false;
      }

      this.log.debug(`failed to download cache,`, error.response.status, 'response');
    }
  }

  /**
   * Iterate through a list of shas, which represent commits
   * on our upstreamBranch, and look for caches which are
   * already downloaded, or try to download them. If the cache
   * for that commit is not available for any reason the next
   * sha will be tried.
   *
   * If we reach the end of the list without any caches being
   * available undefined is returned.
   *
   * @param shas shas for commits to try and find caches for
   */
  async getFirstAvailable(shas: string[]): Promise<Archive | undefined> {
    if (!shas.length) {
      throw new Error('no possible shas to pick archive from');
    }

    for (const sha of shas) {
      let archive = this.bySha.get(sha);

      // if we don't have one locally try to download one
      if (!archive && (await this.attemptToDownload(sha))) {
        archive = this.bySha.get(sha);
      }

      // if we found the archive return it
      if (archive) {
        return archive;
      }

      this.log.debug('no archive available for', sha);
    }

    return undefined;
  }
}
New file: src/dev/typescript/ref_output_cache/index.ts (9 lines)
@@ -0,0 +1,9 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

export * from './ref_output_cache';
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,240 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Path from 'path';
import Fs from 'fs';
import { Readable } from 'stream';

import del from 'del';
import cpy from 'cpy';
import {
  ToolingLog,
  createAbsolutePathSerializer,
  createRecursiveSerializer,
  ToolingLogCollectingWriter,
  createStripAnsiSerializer,
} from '@kbn/dev-utils';

expect.addSnapshotSerializer(createAbsolutePathSerializer());
expect.addSnapshotSerializer(createStripAnsiSerializer());
expect.addSnapshotSerializer(
  createRecursiveSerializer(
    (v) => typeof v === 'object' && v && typeof v.time === 'number',
    (v) => ({ ...v, time: '<number>' })
  )
);

jest.mock('axios', () => {
  return {
    request: jest.fn(),
  };
});
const mockRequest: jest.Mock = jest.requireMock('axios').request;

import { Archives } from '../archives';

const FIXTURE = Path.resolve(__dirname, '__fixtures__');
const TMP = Path.resolve(__dirname, '__tmp__');

beforeAll(() => del(TMP, { force: true }));
beforeEach(() => cpy('.', TMP, { cwd: FIXTURE, parents: true }));
afterEach(async () => {
  await del(TMP, { force: true });
  jest.resetAllMocks();
});

const readArchiveDir = () =>
  Fs.readdirSync(Path.resolve(TMP, 'archives')).sort((a, b) => a.localeCompare(b));

const log = new ToolingLog();
const logWriter = new ToolingLogCollectingWriter();
log.setWriters([logWriter]);
afterEach(() => (logWriter.messages.length = 0));

it('deletes invalid files', async () => {
  const path = Path.resolve(TMP, 'archives/foo.txt');
  Fs.writeFileSync(path, 'hello');
  const archives = await Archives.create(log, TMP);

  expect(archives.size()).toBe(2);
  expect(Fs.existsSync(path)).toBe(false);
});

it('exposes archives by sha', async () => {
  const archives = await Archives.create(log, TMP);
  expect(archives.get('1234')).toMatchInlineSnapshot(`
    Object {
      "path": <absolute path>/src/dev/typescript/ref_output_cache/integration_tests/__tmp__/archives/1234.zip,
      "sha": "1234",
      "time": "<number>",
    }
  `);
  expect(archives.get('5678')).toMatchInlineSnapshot(`
    Object {
      "path": <absolute path>/src/dev/typescript/ref_output_cache/integration_tests/__tmp__/archives/5678.zip,
      "sha": "5678",
      "time": "<number>",
    }
  `);
  expect(archives.get('foo')).toMatchInlineSnapshot(`undefined`);
});

it('deletes archives', async () => {
  const archives = await Archives.create(log, TMP);
  expect(archives.size()).toBe(2);
  await archives.delete('1234');
  expect(archives.size()).toBe(1);
  expect(readArchiveDir()).toMatchInlineSnapshot(`
    Array [
      "5678.zip",
    ]
  `);
});

it('returns false when attempting to download for sha without cache', async () => {
  const archives = await Archives.create(log, TMP);

  mockRequest.mockImplementation(() => {
    throw new Error('404!');
  });

  await expect(archives.attemptToDownload('foobar')).resolves.toBe(false);
});

it('returns true when able to download an archive for a sha', async () => {
  const archives = await Archives.create(log, TMP);

  mockRequest.mockImplementation(() => {
    return {
      data: Readable.from('foobar zip contents'),
    };
  });

  expect(archives.size()).toBe(2);
  await expect(archives.attemptToDownload('foobar')).resolves.toBe(true);
  expect(archives.size()).toBe(3);
  expect(readArchiveDir()).toMatchInlineSnapshot(`
    Array [
      "1234.zip",
      "5678.zip",
      "foobar.zip",
    ]
  `);
  expect(Fs.readFileSync(Path.resolve(TMP, 'archives/foobar.zip'), 'utf-8')).toBe(
    'foobar zip contents'
  );
});

it('returns true if attempting to download a cache which is already downloaded', async () => {
  const archives = await Archives.create(log, TMP);

  mockRequest.mockImplementation(() => {
    throw new Error(`it shouldn't try to download anything`);
  });

  expect(archives.size()).toBe(2);
  await expect(archives.attemptToDownload('1234')).resolves.toBe(true);
  expect(archives.size()).toBe(2);
  expect(readArchiveDir()).toMatchInlineSnapshot(`
    Array [
      "1234.zip",
      "5678.zip",
    ]
  `);
});

it('returns false and deletes the zip if the download fails part way', async () => {
  const archives = await Archives.create(log, TMP);

  mockRequest.mockImplementation(() => {
    let readCounter = 0;
    return {
      data: new Readable({
        read() {
          readCounter++;
          if (readCounter === 1) {
            this.push('foo');
          } else {
            this.emit('error', new Error('something went wrong'));
          }
        },
      }),
    };
  });

  await expect(archives.attemptToDownload('foo')).resolves.toBe(false);
  expect(archives.size()).toBe(2);
  expect(readArchiveDir()).toMatchInlineSnapshot(`
    Array [
      "1234.zip",
      "5678.zip",
    ]
  `);
});

it('resolves to first sha if it is available locally', async () => {
  const archives = await Archives.create(log, TMP);

  expect(await archives.getFirstAvailable(['1234', '5678'])).toHaveProperty('sha', '1234');
  expect(await archives.getFirstAvailable(['5678', '1234'])).toHaveProperty('sha', '5678');
});

it('resolves to first local sha when it tried to reach network and gets errors', async () => {
  const archives = await Archives.create(log, TMP);

  mockRequest.mockImplementation(() => {
    throw new Error('no network available');
  });

  expect(await archives.getFirstAvailable(['foo', 'bar', '1234'])).toHaveProperty('sha', '1234');
  expect(mockRequest).toHaveBeenCalledTimes(2);
  expect(logWriter.messages).toMatchInlineSnapshot(`
    Array [
      " sill identified archive for 1234",
      " sill identified archive for 5678",
      " debg attempting to download cache for foo from https://ts-refs-cache.kibana.dev/foo.zip",
      " debg failed to download cache, ignoring error: no network available",
      " debg no archive available for foo",
      " debg attempting to download cache for bar from https://ts-refs-cache.kibana.dev/bar.zip",
      " debg failed to download cache, ignoring error: no network available",
      " debg no archive available for bar",
    ]
  `);
});

it('resolves to first remote that downloads successfully', async () => {
  const archives = await Archives.create(log, TMP);

  mockRequest.mockImplementation((params) => {
    if (params.url === `https://ts-refs-cache.kibana.dev/bar.zip`) {
      return {
        data: Readable.from('bar cache data'),
      };
    }

    throw new Error('no network available');
  });

  const archive = await archives.getFirstAvailable(['foo', 'bar', '1234']);
  expect(archive).toHaveProperty('sha', 'bar');
  expect(mockRequest).toHaveBeenCalledTimes(2);
  expect(logWriter.messages).toMatchInlineSnapshot(`
    Array [
      " sill identified archive for 1234",
      " sill identified archive for 5678",
      " debg attempting to download cache for foo from https://ts-refs-cache.kibana.dev/foo.zip",
      " debg failed to download cache, ignoring error: no network available",
      " debg no archive available for foo",
      " debg attempting to download cache for bar from https://ts-refs-cache.kibana.dev/bar.zip",
      " debg download complete, renaming tmp",
      " debg download of cache for bar complete",
    ]
  `);

  expect(Fs.readFileSync(archive!.path, 'utf-8')).toBe('bar cache data');
});
@@ -0,0 +1,156 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Path from 'path';
import Fs from 'fs';

import del from 'del';
import cpy from 'cpy';
import globby from 'globby';
import {
  ToolingLog,
  createAbsolutePathSerializer,
  createStripAnsiSerializer,
  ToolingLogCollectingWriter,
} from '@kbn/dev-utils';

import { RefOutputCache, OUTDIR_MERGE_BASE_FILENAME } from '../ref_output_cache';
import { Archives } from '../archives';
import type { RepoInfo } from '../repo_info';

jest.mock('../repo_info');
const { RepoInfo: MockRepoInfo } = jest.requireMock('../repo_info');

jest.mock('axios');
const { request: mockRequest } = jest.requireMock('axios');

expect.addSnapshotSerializer(createAbsolutePathSerializer());
expect.addSnapshotSerializer(createStripAnsiSerializer());

const FIXTURE = Path.resolve(__dirname, '__fixtures__');
const TMP = Path.resolve(__dirname, '__tmp__');
const repo: jest.Mocked<RepoInfo> = new MockRepoInfo();
const log = new ToolingLog();
const logWriter = new ToolingLogCollectingWriter();
log.setWriters([logWriter]);

beforeAll(() => del(TMP, { force: true }));
beforeEach(() => cpy('.', TMP, { cwd: FIXTURE, parents: true }));
afterEach(async () => {
  await del(TMP, { force: true });
  jest.resetAllMocks();
  logWriter.messages.length = 0;
});

it('creates and extracts caches, ignoring dirs with matching merge-base file and placing merge-base files', async () => {
  // setup repo mock
  const HEAD = 'abcdefg';
  repo.getHeadSha.mockResolvedValue(HEAD);
  repo.getRelative.mockImplementation((path) => Path.relative(TMP, path));
  repo.getRecentShasFrom.mockResolvedValue(['5678', '1234']);

  // create two fake outDirs
  const outDirs = [Path.resolve(TMP, 'out/foo'), Path.resolve(TMP, 'out/bar')];
  for (const dir of outDirs) {
    Fs.mkdirSync(dir, { recursive: true });
    Fs.writeFileSync(Path.resolve(dir, 'test'), 'hello world');
  }

  // init an archives instance using tmp
  const archives = await Archives.create(log, TMP);

  // init the RefOutputCache with our mock data
  const refOutputCache = new RefOutputCache(log, repo, archives, outDirs, HEAD);

  // create the new cache right in the archives dir
  await refOutputCache.captureCache(Path.resolve(TMP));
  const cachePath = Path.resolve(TMP, `${HEAD}.zip`);

  // check that the cache was created and stored in the archives
  if (!Fs.existsSync(cachePath)) {
    throw new Error('zip was not created as expected');
  }

  mockRequest.mockImplementation((params: any) => {
    if (params.url.endsWith(`${HEAD}.zip`)) {
      return {
        data: Fs.createReadStream(cachePath),
      };
    }

    throw new Error(`unexpected url: ${params.url}`);
  });

  // modify the files in the outDirs so we can see which ones are restored from the cache
  for (const dir of outDirs) {
    Fs.writeFileSync(Path.resolve(dir, 'test'), 'not cleared by cache init');
  }
  // add the mergeBase to the first outDir so that it is ignored
  Fs.writeFileSync(Path.resolve(outDirs[0], OUTDIR_MERGE_BASE_FILENAME), HEAD);

  // rebuild the outDir from the refOutputCache
  await refOutputCache.initCaches();

  const files = Object.fromEntries(
    globby
      .sync(outDirs, { dot: true })
      .map((path) => [Path.relative(TMP, path), Fs.readFileSync(path, 'utf-8')])
  );

  expect(files).toMatchInlineSnapshot(`
    Object {
      "out/bar/.ts-ref-cache-merge-base": "abcdefg",
      "out/bar/test": "hello world",
      "out/foo/.ts-ref-cache-merge-base": "abcdefg",
      "out/foo/test": "not cleared by cache init",
    }
  `);
  expect(logWriter.messages).toMatchInlineSnapshot(`
    Array [
      " sill identified archive for 1234",
      " sill identified archive for 5678",
      " debg writing ts-ref cache to abcdefg.zip",
      " succ wrote archive to abcdefg.zip",
      " debg attempting to download cache for abcdefg from https://ts-refs-cache.kibana.dev/abcdefg.zip",
      " debg download complete, renaming tmp",
      " debg download of cache for abcdefg complete",
      " debg extracting archives/abcdefg.zip to rebuild caches in 1 outDirs",
      " debg [out/bar] clearing outDir and replacing with cache",
    ]
  `);
});

it('cleans up oldest archives when there are more than 10', async () => {
  for (let i = 0; i < 100; i++) {
    const time = i * 10_000;
    const path = Path.resolve(TMP, `archives/${time}.zip`);
    Fs.writeFileSync(path, '');
    Fs.utimesSync(path, time, time);
  }

  const archives = await Archives.create(log, TMP);
  const cache = new RefOutputCache(log, repo, archives, [], '1234');
  expect(cache.archives.size()).toBe(102);
  await cache.cleanup();
  expect(cache.archives.size()).toBe(10);
  expect(Fs.readdirSync(Path.resolve(TMP, 'archives')).sort((a, b) => a.localeCompare(b)))
    .toMatchInlineSnapshot(`
    Array [
      "1234.zip",
      "5678.zip",
      "920000.zip",
      "930000.zip",
      "940000.zip",
      "950000.zip",
      "960000.zip",
      "970000.zip",
      "980000.zip",
      "990000.zip",
    ]
  `);
});
New file: src/dev/typescript/ref_output_cache/ref_output_cache.ts (185 lines)
@@ -0,0 +1,185 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Path from 'path';
import Fs from 'fs/promises';

import { ToolingLog, kibanaPackageJson } from '@kbn/dev-utils';
import del from 'del';
import tempy from 'tempy';

import { Archives } from './archives';
import { unzip, zip } from './zip';
import { concurrentMap } from '../concurrent_map';
import { RepoInfo } from './repo_info';

export const OUTDIR_MERGE_BASE_FILENAME = '.ts-ref-cache-merge-base';

export async function matchMergeBase(outDir: string, sha: string) {
  try {
    const existing = await Fs.readFile(Path.resolve(outDir, OUTDIR_MERGE_BASE_FILENAME), 'utf8');
    return existing === sha;
  } catch (error) {
    if (error.code === 'ENOENT') {
      return false;
    }

    throw error;
  }
}

export async function isDir(path: string) {
  try {
    return (await Fs.stat(path)).isDirectory();
  } catch (error) {
    if (error.code === 'ENOENT') {
      return false;
    }

    throw error;
  }
}

export class RefOutputCache {
  static async create(options: {
    log: ToolingLog;
    workingDir: string;
    outDirs: string[];
    repoRoot: string;
    upstreamUrl: string;
  }) {
    const repoInfo = new RepoInfo(options.log, options.repoRoot, options.upstreamUrl);
    const archives = await Archives.create(options.log, options.workingDir);

    const upstreamBranch: string = kibanaPackageJson.branch;
    const mergeBase = await repoInfo.getMergeBase('HEAD', upstreamBranch);

    return new RefOutputCache(options.log, repoInfo, archives, options.outDirs, mergeBase);
  }

  constructor(
    private readonly log: ToolingLog,
    private readonly repo: RepoInfo,
    public readonly archives: Archives,
    private readonly outDirs: string[],
    private readonly mergeBase: string
  ) {}

  /**
   * Find the most recent cache/archive of the outDirs and replace the outDirs
   * on disk with the files in the cache if the outDir has an outdated merge-base
   * written to the directory.
   */
  async initCaches() {
    const archive =
      this.archives.get(this.mergeBase) ??
      (await this.archives.getFirstAvailable([
        this.mergeBase,
        ...(await this.repo.getRecentShasFrom(this.mergeBase, 5)),
      ]));

    if (!archive) {
      return;
    }

    const outdatedOutDirs = (
      await concurrentMap(100, this.outDirs, async (outDir) => ({
        path: outDir,
        outdated: !(await matchMergeBase(outDir, archive.sha)),
      }))
    )
      .filter((o) => o.outdated)
      .map((o) => o.path);

    if (!outdatedOutDirs.length) {
      this.log.debug('all outDirs have the most recent cache');
      return;
    }

    const tmpDir = tempy.directory();
    this.log.debug(
      'extracting',
      this.repo.getRelative(archive.path),
      'to rebuild caches in',
      outdatedOutDirs.length,
      'outDirs'
    );
    await unzip(archive.path, tmpDir);

    const cacheNames = await Fs.readdir(tmpDir);

    await concurrentMap(50, outdatedOutDirs, async (outDir) => {
      const relative = this.repo.getRelative(outDir);
      const cacheName = `${relative.split(Path.sep).join('__')}.zip`;

      if (!cacheNames.includes(cacheName)) {
        this.log.debug(`[${relative}] not in cache`);
        await Fs.mkdir(outDir, { recursive: true });
        await Fs.writeFile(Path.resolve(outDir, OUTDIR_MERGE_BASE_FILENAME), archive.sha);
        return;
      }

      if (await matchMergeBase(outDir, archive.sha)) {
        this.log.debug(`[${relative}] keeping outdir, created from selected sha`);
        return;
      }

      this.log.debug(`[${relative}] clearing outDir and replacing with cache`);
      await del(outDir);
      await unzip(Path.resolve(tmpDir, cacheName), outDir);
      await Fs.writeFile(Path.resolve(outDir, OUTDIR_MERGE_BASE_FILENAME), archive.sha);
    });
  }

  /**
   * Iterate through the outDirs, zip each of them up, and then zip those zips
   * into a single archive so that we can upload/download/extract just a portion
   * of the archive at a time.
   *
   * @param outputDir directory that the {HEAD}.zip file should be written to
   */
  async captureCache(outputDir: string) {
    const tmpDir = tempy.directory();
    const currentSha = await this.repo.getHeadSha();
    const outputPath = Path.resolve(outputDir, `${currentSha}.zip`);
    const relativeOutputPath = this.repo.getRelative(outputPath);

    this.log.debug('writing ts-ref cache to', relativeOutputPath);

    const subZips: Array<[string, string]> = [];

    await Promise.all(
      this.outDirs.map(async (absolute) => {
        const relative = this.repo.getRelative(absolute);
        const subZipName = `${relative.split(Path.sep).join('__')}.zip`;
        const subZipPath = Path.resolve(tmpDir, subZipName);
        await zip([[absolute, '/']], [], subZipPath);
        subZips.push([subZipPath, subZipName]);
      })
    );

    await zip([], subZips, outputPath);
    await del(tmpDir, { force: true });
    this.log.success('wrote archive to', relativeOutputPath);
  }

  /**
   * Cleanup the downloaded cache files, keeping the 10 newest files. Each file
   * is about 25-30MB, so 10 downloads is a decent amount of disk space for
   * caches, but we could potentially increase this number in the future if we like.
   */
  async cleanup() {
    // sort archives by time desc
    const archives = [...this.archives].sort((a, b) => b.time - a.time);

    // delete the 11th+ archive
    for (const { sha } of archives.slice(10)) {
      await this.archives.delete(sha);
    }
  }
}
New file: src/dev/typescript/ref_output_cache/repo_info.ts (55 lines)
@@ -0,0 +1,55 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Path from 'path';

import execa from 'execa';
import { ToolingLog } from '@kbn/dev-utils';

export class RepoInfo {
  constructor(
    private readonly log: ToolingLog,
    private readonly dir: string,
    private readonly upstreamUrl: string
  ) {}

  async getRecentShasFrom(sha: string, size: number) {
    return (await this.git(['log', '--pretty=%P', `-n`, `${size}`, sha]))
      .split('\n')
      .map((l) => l.trim())
      .filter(Boolean);
  }

  async getMergeBase(ref: string, upstreamBranch: string) {
    this.log.info('ensuring we have the latest changelog from upstream', upstreamBranch);
    await this.git(['fetch', this.upstreamUrl, upstreamBranch]);

    this.log.info('determining merge base with upstream');

    const mergeBase = await this.git(['merge-base', ref, 'FETCH_HEAD']);
    this.log.info('merge base with', upstreamBranch, 'is', mergeBase);

    return mergeBase;
  }

  async getHeadSha() {
    return await this.git(['rev-parse', 'HEAD']);
  }

  getRelative(path: string) {
    return Path.relative(this.dir, path);
  }

  private async git(args: string[]) {
    const proc = await execa('git', args, {
      cwd: this.dir,
    });

    return proc.stdout.trim();
  }
}
New file: src/dev/typescript/ref_output_cache/zip.ts (52 lines)
@@ -0,0 +1,52 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import Fs from 'fs/promises';
import { createWriteStream } from 'fs';
import Path from 'path';
import { pipeline } from 'stream';
import { promisify } from 'util';

import extractZip from 'extract-zip';
import archiver from 'archiver';

const asyncPipeline = promisify(pipeline);

export async function zip(
  dirs: Array<[string, string]>,
  files: Array<[string, string]>,
  outputPath: string
) {
  const archive = archiver('zip', {
    zlib: {
      level: 9,
    },
  });

  for (const [absolute, relative] of dirs) {
    archive.directory(absolute, relative);
  }

  for (const [absolute, relative] of files) {
    archive.file(absolute, {
      name: relative,
    });
  }

  // ensure output dir exists
  await Fs.mkdir(Path.dirname(outputPath), { recursive: true });

  // await the promise from the pipeline and archive.finalize()
  await Promise.all([asyncPipeline(archive, createWriteStream(outputPath)), archive.finalize()]);
}

export async function unzip(path: string, outputDir: string) {
  await extractZip(path, {
    dir: outputDir,
  });
}
@@ -7,7 +7,7 @@

 import { BehaviorSubject, Observable } from 'rxjs';
 import { first } from 'rxjs/operators';
-import { kibanaPackageJSON } from '@kbn/utils';
+import { kibanaPackageJson } from '@kbn/utils';

 import {
   ElasticsearchClient,
@@ -34,8 +34,8 @@ class AppContextService {
   private configSubject$?: BehaviorSubject<FleetConfigType>;
   private savedObjects: SavedObjectsServiceStart | undefined;
   private isProductionMode: FleetAppContext['isProductionMode'] = false;
-  private kibanaVersion: FleetAppContext['kibanaVersion'] = kibanaPackageJSON.version;
-  private kibanaBranch: FleetAppContext['kibanaBranch'] = kibanaPackageJSON.branch;
+  private kibanaVersion: FleetAppContext['kibanaVersion'] = kibanaPackageJson.version;
+  private kibanaBranch: FleetAppContext['kibanaBranch'] = kibanaPackageJson.branch;
   private cloud?: CloudSetup;
   private logger: Logger | undefined;
   private httpSetup?: HttpServiceSetup;