typescript-ify portions of src/optimize (#64688)

This commit is contained in:
Spencer 2020-04-28 18:14:34 -07:00 committed by GitHub
parent 6986c73cd9
commit 408ad6f389
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 232 additions and 121 deletions

View file

@ -18,10 +18,13 @@
*/
import { isAbsolute, extname, join } from 'path';
import LruCache from 'lru-cache';
import Hapi from 'hapi';
import * as UiSharedDeps from '@kbn/ui-shared-deps';
import { createDynamicAssetResponse } from './dynamic_asset_response';
import { assertIsNpUiPluginPublicDirs } from '../np_ui_plugin_public_dirs';
import { FileHashCache } from './file_hash_cache';
import { assertIsNpUiPluginPublicDirs, NpUiPluginPublicDirs } from '../np_ui_plugin_public_dirs';
import { fromRoot } from '../../core/server/utils';
/**
@ -44,11 +47,17 @@ export function createBundlesRoute({
basePublicPath,
builtCssPath,
npUiPluginPublicDirs = [],
}: {
regularBundlesPath: string;
dllBundlesPath: string;
basePublicPath: string;
builtCssPath: string;
npUiPluginPublicDirs?: NpUiPluginPublicDirs;
}) {
// rather than calculate the fileHash on every request, we
// provide a cache object to `resolveDynamicAssetResponse()` that
// will store the 100 most recently used hashes.
const fileHashCache = new LruCache(100);
const fileHashCache = new FileHashCache();
assertIsNpUiPluginPublicDirs(npUiPluginPublicDirs);
if (typeof regularBundlesPath !== 'string' || !isAbsolute(regularBundlesPath)) {
@ -122,6 +131,12 @@ function buildRouteForBundles({
bundlesPath,
fileHashCache,
replacePublicPath = true,
}: {
publicPath: string;
routePath: string;
bundlesPath: string;
fileHashCache: FileHashCache;
replacePublicPath?: boolean;
}) {
return {
method: 'GET',
@ -130,7 +145,7 @@ function buildRouteForBundles({
auth: false,
ext: {
onPreHandler: {
method(request, h) {
method(request: Hapi.Request, h: Hapi.ResponseToolkit) {
const ext = extname(request.params.path);
if (ext !== '.js' && ext !== '.css') {

View file

@ -18,14 +18,20 @@
*/
import { resolve } from 'path';
import { open, fstat, createReadStream, close } from 'fs';
import Fs from 'fs';
import { promisify } from 'util';
import Boom from 'boom';
import { fromNode as fcb } from 'bluebird';
import Hapi from 'hapi';
import { FileHashCache } from './file_hash_cache';
import { getFileHash } from './file_hash';
import { replacePlaceholder } from '../public_path_placeholder';
const asyncOpen = promisify(Fs.open);
const asyncClose = promisify(Fs.close);
const asyncFstat = promisify(Fs.fstat);
/**
* Create a Hapi response for the requested path. This is designed
* to replicate a subset of the features provided by Hapi's Inert
@ -44,39 +50,46 @@ import { replacePlaceholder } from '../public_path_placeholder';
* - cached hash/etag is based on the file on disk, but modified
* by the public path so that individual public paths have
* different etags, but can share a cache
*
* @param {Object} options
* @property {Hapi.Request} options.request
* @property {string} options.bundlesPath
* @property {string} options.publicPath
* @property {LruCache} options.fileHashCache
*/
export async function createDynamicAssetResponse(options) {
const { request, h, bundlesPath, publicPath, fileHashCache, replacePublicPath } = options;
export async function createDynamicAssetResponse({
request,
h,
bundlesPath,
publicPath,
fileHashCache,
replacePublicPath,
}: {
request: Hapi.Request;
h: Hapi.ResponseToolkit;
bundlesPath: string;
publicPath: string;
fileHashCache: FileHashCache;
replacePublicPath: boolean;
}) {
let fd: number | undefined;
let fd;
try {
const path = resolve(bundlesPath, request.params.path);
// prevent path traversal, only process paths that resolve within bundlesPath
if (!path.startsWith(bundlesPath)) {
throw Boom.forbidden(null, 'EACCES');
throw Boom.forbidden(undefined, 'EACCES');
}
// we use and manage a file descriptor mostly because
// that's what Inert does, and since we are accessing
// the file 2 or 3 times per request it seems logical
fd = await fcb(cb => open(path, 'r', cb));
fd = await asyncOpen(path, 'r');
const stat = await fcb(cb => fstat(fd, cb));
const stat = await asyncFstat(fd);
const hash = await getFileHash(fileHashCache, path, stat, fd);
const read = createReadStream(null, {
const read = Fs.createReadStream(null as any, {
fd,
start: 0,
autoClose: true,
});
fd = null; // read stream is now responsible for fd
fd = undefined; // read stream is now responsible for fd
const content = replacePublicPath ? replacePlaceholder(read, publicPath) : read;
const etag = replacePublicPath ? `${hash}-${publicPath}` : hash;
@ -91,8 +104,8 @@ export async function createDynamicAssetResponse(options) {
} catch (error) {
if (fd) {
try {
await fcb(cb => close(fd, cb));
} catch (error) {
await asyncClose(fd);
} catch (_) {
// ignore errors from close, we already have one to report
// and it's very likely they are the same
}

View file

@ -18,20 +18,17 @@
*/
import { createHash } from 'crypto';
import { createReadStream } from 'fs';
import Fs from 'fs';
import * as Rx from 'rxjs';
import { merge, mergeMap, takeUntil } from 'rxjs/operators';
import { takeUntil, map } from 'rxjs/operators';
import { FileHashCache } from './file_hash_cache';
/**
* Get the hash of a file via a file descriptor
* @param {LruCache} cache
* @param {string} path
* @param {Fs.Stat} stat
* @param {Fs.FileDescriptor} fd
* @return {Promise<string>}
*/
export async function getFileHash(cache, path, stat, fd) {
export async function getFileHash(cache: FileHashCache, path: string, stat: Fs.Stats, fd: number) {
const key = `${path}:${stat.ino}:${stat.size}:${stat.mtime.getTime()}`;
const cached = cache.get(key);
@ -40,17 +37,21 @@ export async function getFileHash(cache, path, stat, fd) {
}
const hash = createHash('sha1');
const read = createReadStream(null, {
const read = Fs.createReadStream(null as any, {
fd,
start: 0,
autoClose: false,
});
const promise = Rx.fromEvent(read, 'data')
.pipe(
merge(Rx.fromEvent(read, 'error').pipe(mergeMap(Rx.throwError))),
takeUntil(Rx.fromEvent(read, 'end'))
const promise = Rx.merge(
Rx.fromEvent<Buffer>(read, 'data'),
Rx.fromEvent<Error>(read, 'error').pipe(
map(error => {
throw error;
})
)
)
.pipe(takeUntil(Rx.fromEvent(read, 'end')))
.forEach(chunk => hash.update(chunk))
.then(() => hash.digest('hex'))
.catch(error => {

View file

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import LruCache from 'lru-cache';
/**
 * Thin wrapper around an LRU cache holding promises of file hashes,
 * bounded to the 100 most recently used entries.
 */
export class FileHashCache {
  private cache = new LruCache<string, Promise<string>>(100);

  get(key: string) {
    return this.cache.get(key);
  }

  set(key: string, value: Promise<string>) {
    this.cache.set(key, value);
  }

  del(key: string) {
    this.cache.del(key);
  }
}

View file

@ -17,7 +17,7 @@
* under the License.
*/
export function createProxyBundlesRoute({ host, port }) {
export function createProxyBundlesRoute({ host, port }: { host: string; port: number }) {
return [
buildProxyRouteForBundles('/bundles/', host, port),
buildProxyRouteForBundles('/built_assets/dlls/', host, port),
@ -25,7 +25,7 @@ export function createProxyBundlesRoute({ host, port }) {
];
}
function buildProxyRouteForBundles(routePath, host, port) {
function buildProxyRouteForBundles(routePath: string, host: string, port: number) {
return {
path: `${routePath}{path*}`,
method: 'GET',

View file

@ -17,72 +17,5 @@
* under the License.
*/
import FsOptimizer from './fs_optimizer';
import { createBundlesRoute } from './bundles_route';
import { DllCompiler } from './dynamic_dll_plugin';
import { fromRoot } from '../core/server/utils';
import { getNpUiPluginPublicDirs } from './np_ui_plugin_public_dirs';
export default async (kbnServer, server, config) => {
if (!config.get('optimize.enabled')) return;
// the watch optimizer sets up two threads, one is the server listening
// on 5601 and the other is a server listening on 5602 that builds the
// bundles in a "middleware" style.
//
// the server listening on 5601 may be restarted a number of times, depending
// on the watch setup managed by the cli. It proxies all bundles/* and built_assets/dlls/*
// requests to the other server. The server on 5602 is long running, in order
// to prevent complete rebuilds of the optimize content.
const watch = config.get('optimize.watch');
if (watch) {
return await kbnServer.mixin(require('./watch/watch'));
}
const { uiBundles } = kbnServer;
server.route(
createBundlesRoute({
regularBundlesPath: uiBundles.getWorkingDir(),
dllBundlesPath: DllCompiler.getRawDllConfig().outputPath,
basePublicPath: config.get('server.basePath'),
builtCssPath: fromRoot('built_assets/css'),
npUiPluginPublicDirs: getNpUiPluginPublicDirs(kbnServer),
})
);
// in prod, only bundle when something is missing or invalid
const reuseCache = config.get('optimize.useBundleCache')
? await uiBundles.areAllBundleCachesValid()
: false;
// we might not have any work to do
if (reuseCache) {
server.log(['debug', 'optimize'], `All bundles are cached and ready to go!`);
return;
}
await uiBundles.resetBundleDir();
// only require the FsOptimizer when we need to
const optimizer = new FsOptimizer({
logWithMetadata: (tags, message, metadata) => server.logWithMetadata(tags, message, metadata),
uiBundles,
profile: config.get('optimize.profile'),
sourceMaps: config.get('optimize.sourceMaps'),
workers: config.get('optimize.workers'),
});
server.log(
['info', 'optimize'],
`Optimizing and caching ${uiBundles.getDescription()}. This may take a few minutes`
);
const start = Date.now();
await optimizer.run();
const seconds = ((Date.now() - start) / 1000).toFixed(2);
server.log(
['info', 'optimize'],
`Optimization of ${uiBundles.getDescription()} complete in ${seconds} seconds`
);
};
import { optimizeMixin } from './optimize_mixin';
export default optimizeMixin;

View file

@ -17,7 +17,14 @@
* under the License.
*/
export function getNpUiPluginPublicDirs(kbnServer) {
import KbnServer from '../legacy/server/kbn_server';
export type NpUiPluginPublicDirs = Array<{
id: string;
path: string;
}>;
export function getNpUiPluginPublicDirs(kbnServer: KbnServer): NpUiPluginPublicDirs {
return Array.from(kbnServer.newPlatform.__internals.uiPlugins.internal.entries()).map(
([id, { publicTargetDir }]) => ({
id,
@ -26,17 +33,17 @@ export function getNpUiPluginPublicDirs(kbnServer) {
);
}
export function isNpUiPluginPublicDirs(something) {
export function isNpUiPluginPublicDirs(x: any): x is NpUiPluginPublicDirs {
return (
Array.isArray(something) &&
something.every(
Array.isArray(x) &&
x.every(
s => typeof s === 'object' && s && typeof s.id === 'string' && typeof s.path === 'string'
)
);
}
export function assertIsNpUiPluginPublicDirs(something) {
if (!isNpUiPluginPublicDirs(something)) {
export function assertIsNpUiPluginPublicDirs(x: any): asserts x is NpUiPluginPublicDirs {
if (!isNpUiPluginPublicDirs(x)) {
throw new TypeError(
'npUiPluginPublicDirs must be an array of objects with string `id` and `path` properties'
);

View file

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import Hapi from 'hapi';
// @ts-ignore not TS yet
import FsOptimizer from './fs_optimizer';
import { createBundlesRoute } from './bundles_route';
// @ts-ignore not TS yet
import { DllCompiler } from './dynamic_dll_plugin';
import { fromRoot } from '../core/server/utils';
import { getNpUiPluginPublicDirs } from './np_ui_plugin_public_dirs';
import KbnServer, { KibanaConfig } from '../legacy/server/kbn_server';
/**
 * Legacy "optimize" mixin: registers the bundle-serving routes and, unless
 * the watch optimizer is active or valid caches exist, builds the bundles
 * up front with FsOptimizer.
 *
 * @param kbnServer legacy Kibana server instance
 * @param server Hapi server the bundle routes are registered on
 * @param config legacy Kibana config
 */
export const optimizeMixin = async (
  kbnServer: KbnServer,
  server: Hapi.Server,
  config: KibanaConfig
) => {
  if (!config.get('optimize.enabled')) return;

  // the watch optimizer sets up two threads, one is the server listening
  // on 5601 and the other is a server listening on 5602 that builds the
  // bundles in a "middleware" style.
  //
  // the server listening on 5601 may be restarted a number of times, depending
  // on the watch setup managed by the cli. It proxies all bundles/* and built_assets/dlls/*
  // requests to the other server. The server on 5602 is long running, in order
  // to prevent complete rebuilds of the optimize content.
  const watch = config.get('optimize.watch');
  if (watch) {
    // eslint-disable-next-line @typescript-eslint/no-var-requires
    return await kbnServer.mixin(require('./watch/watch'));
  }

  const { uiBundles } = kbnServer;
  server.route(
    createBundlesRoute({
      regularBundlesPath: uiBundles.getWorkingDir(),
      dllBundlesPath: DllCompiler.getRawDllConfig().outputPath,
      basePublicPath: config.get('server.basePath'),
      builtCssPath: fromRoot('built_assets/css'),
      npUiPluginPublicDirs: getNpUiPluginPublicDirs(kbnServer),
    })
  );

  // in prod, only bundle when something is missing or invalid
  const reuseCache = config.get('optimize.useBundleCache')
    ? await uiBundles.areAllBundleCachesValid()
    : false;

  // we might not have any work to do
  if (reuseCache) {
    server.log(['debug', 'optimize'], `All bundles are cached and ready to go!`);
    return;
  }

  await uiBundles.resetBundleDir();

  // only require the FsOptimizer when we need to
  const optimizer = new FsOptimizer({
    // bind so `this` inside logWithMetadata stays the Hapi server —
    // passing the bare method reference would detach it from its receiver
    logWithMetadata: server.logWithMetadata.bind(server),
    uiBundles,
    profile: config.get('optimize.profile'),
    sourceMaps: config.get('optimize.sourceMaps'),
    workers: config.get('optimize.workers'),
  });

  server.log(
    ['info', 'optimize'],
    `Optimizing and caching ${uiBundles.getDescription()}. This may take a few minutes`
  );

  const start = Date.now();
  await optimizer.run();
  const seconds = ((Date.now() - start) / 1000).toFixed(2);

  server.log(
    ['info', 'optimize'],
    `Optimization of ${uiBundles.getDescription()} complete in ${seconds} seconds`
  );
};

View file

@ -17,14 +17,20 @@
* under the License.
*/
import { createReplaceStream } from '../legacy/utils';
import Stream from 'stream';
import Fs from 'fs';
import * as Rx from 'rxjs';
import { take, takeUntil } from 'rxjs/operators';
import { createReplaceStream } from '../legacy/utils';
export const PUBLIC_PATH_PLACEHOLDER = '__REPLACE_WITH_PUBLIC_PATH__';
export function replacePlaceholder(read, replacement) {
// A Transform stream that also exposes close(), letting consumers tear
// down the pipeline early (the implementation unpipes and closes the
// underlying read stream when it supports close()).
interface ClosableTransform extends Stream.Transform {
  close(): void;
}
export function replacePlaceholder(read: Stream.Readable, replacement: string) {
const replace = createReplaceStream(PUBLIC_PATH_PLACEHOLDER, replacement);
// handle errors on the read stream by proxying them
@ -37,13 +43,15 @@ export function replacePlaceholder(read, replacement) {
replace.end();
});
replace.close = () => {
read.unpipe();
const closableReplace: ClosableTransform = Object.assign(replace, {
close: () => {
read.unpipe();
if (read.close) {
read.close();
}
};
if ('close' in read) {
(read as Fs.ReadStream).close();
}
},
});
return read.pipe(replace);
return read.pipe(closableReplace);
}