Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00
[Reporting] code cleanup for reporting browser build/install/setup utilities (#98799)
* [Reporting] code cleanup for reporting browser setup utilities
* fix target_cpu
* Update README.md
* Update README.md
* add note about target_cpu
* Update paths.ts
* more cleanup
* Update src/dev/chromium_version.ts
* remove bug

Co-authored-by: Michael Dokolin <dokmic@gmail.com>
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
parent 93b064930a
commit 832d3b7ffd
30 changed files with 257 additions and 237 deletions
@@ -35,8 +35,10 @@ async function getPuppeteerRelease(log: ToolingLog): Promise<PuppeteerRelease> {
       'Could not get the Puppeteer version! Check node_modules/puppteer/package.json'
     );
   }
-  log.info(`Kibana is using Puppeteer ${version} (${forkCompatibilityMap[version]})`);
-  return forkCompatibilityMap[version];
+  const puppeteerRelease = forkCompatibilityMap[version] ?? version;
+
+  log.info(`Kibana is using Puppeteer ${version} (${puppeteerRelease})`);
+  return puppeteerRelease;
 }

 async function getChromiumRevision(
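The change above also fixes a latent bug: `forkCompatibilityMap[version]` is `undefined` for any Puppeteer version without a fork entry, so the old code could log and return `undefined`. A minimal sketch of the new lookup behavior (the map entry shown is illustrative, not the real table):

```ts
// Illustrative only: the real forkCompatibilityMap lives in src/dev/chromium_version.ts
const forkCompatibilityMap: Record<string, string> = {
  '5.4.1-patch.1': '5.4.1',
};

function resolvePuppeteerRelease(version: string): string {
  // Forked builds map to their upstream release; anything else passes through unchanged.
  return forkCompatibilityMap[version] ?? version;
}

resolvePuppeteerRelease('5.4.1-patch.1'); // '5.4.1'
resolvePuppeteerRelease('5.5.0'); // '5.5.0' (no fork entry, falls back to the version itself)
```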
@@ -129,8 +131,8 @@ run(
     description: chalk`
       Display the Chromium git commit that correlates to a given Puppeteer release.

-      - node x-pack/dev-tools/chromium_version 5.5.0 {dim # gets the Chromium commit for Puppeteer v5.5.0}
-      - node x-pack/dev-tools/chromium_version {dim # gets the Chromium commit for the Kibana dependency version of Puppeteer}
+      - node scripts/chromium_version 5.5.0 {dim # gets the Chromium commit for Puppeteer v5.5.0}
+      - node scripts/chromium_version {dim # gets the Chromium commit for the Kibana dependency version of Puppeteer}

       You can use https://omahaproxy.appspot.com/ to look up the Chromium release that first shipped with that commit.
     `,
@@ -15,14 +15,14 @@ const log = new ToolingLog({
 });

 describe(`enumeratePatterns`, () => {
-  it(`should resolve x-pack/plugins/reporting/server/browsers/extract/unzip.js to kibana-reporting`, () => {
+  it(`should resolve x-pack/plugins/reporting/server/browsers/extract/unzip.ts to kibana-reporting`, () => {
     const actual = enumeratePatterns(REPO_ROOT)(log)(
       new Map([['x-pack/plugins/reporting', ['kibana-reporting']]])
     );

     expect(
       actual[0].includes(
-        'x-pack/plugins/reporting/server/browsers/extract/unzip.js kibana-reporting'
+        'x-pack/plugins/reporting/server/browsers/extract/unzip.ts kibana-reporting'
       )
     ).toBe(true);
   });
@@ -148,10 +148,10 @@ x-pack/plugins/reporting/server/browsers/download/download.ts kibana-reporting
 x-pack/plugins/reporting/server/browsers/download/ensure_downloaded.ts kibana-reporting
 x-pack/plugins/reporting/server/browsers/download/index.ts kibana-reporting
 x-pack/plugins/reporting/server/browsers/download/util.ts kibana-reporting
-x-pack/plugins/reporting/server/browsers/extract/extract.js kibana-reporting
-x-pack/plugins/reporting/server/browsers/extract/extract_error.js kibana-reporting
-x-pack/plugins/reporting/server/browsers/extract/index.js kibana-reporting
-x-pack/plugins/reporting/server/browsers/extract/unzip.js kibana-reporting
+x-pack/plugins/reporting/server/browsers/extract/extract.ts kibana-reporting
+x-pack/plugins/reporting/server/browsers/extract/extract_error.ts kibana-reporting
+x-pack/plugins/reporting/server/browsers/extract/index.ts kibana-reporting
+x-pack/plugins/reporting/server/browsers/extract/unzip.ts kibana-reporting
 x-pack/plugins/reporting/server/browsers/index.ts kibana-reporting
 x-pack/plugins/reporting/server/browsers/install.ts kibana-reporting
 x-pack/plugins/reporting/server/browsers/network_policy.test.ts kibana-reporting
@@ -32,13 +32,13 @@ describe(`Transform fns`, () => {
   it(`should remove the jenkins workspace path`, () => {
     const obj = {
       staticSiteUrl:
-        '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
+        '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.ts',
       COVERAGE_INGESTION_KIBANA_ROOT:
         '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana',
     };
     expect(coveredFilePath(obj)).toHaveProperty(
       'coveredFilePath',
-      'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
+      'x-pack/plugins/reporting/server/browsers/extract/unzip.ts'
     );
   });
 });
@@ -46,13 +46,13 @@ describe(`Transform fns`, () => {
   it(`should remove the jenkins workspace path`, () => {
     const obj = {
       staticSiteUrl:
-        '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
+        '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.ts',
       COVERAGE_INGESTION_KIBANA_ROOT:
         '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana',
     };
     expect(coveredFilePath(obj)).toHaveProperty(
       'coveredFilePath',
-      'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
+      'x-pack/plugins/reporting/server/browsers/extract/unzip.ts'
     );
   });
 });
@@ -82,7 +82,7 @@ describe(`Transform fns`, () => {
   describe(`teamAssignment`, () => {
     const teamAssignmentsPathMOCK =
       'src/dev/code_coverage/ingest_coverage/__tests__/mocks/team_assign_mock.txt';
-    const coveredFilePath = 'x-pack/plugins/reporting/server/browsers/extract/unzip.js';
+    const coveredFilePath = 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts';
     const obj = { coveredFilePath };
     const log = new ToolingLog({
       level: 'info',
x-pack/.gitignore (vendored, 2 changes)
@@ -3,7 +3,7 @@
 /target
 /test/functional/failure_debug
 /test/functional/screenshots
 /test/functional/apps/reporting/reports/session
 /test/functional/apps/**/reports/session
 /test/reporting/configs/failure_debug/
-/plugins/reporting/.chromium/
+/plugins/reporting/chromium/
@@ -12,8 +12,8 @@ which is where we have two machines provisioned for the Linux and Windows
 builds. Mac builds can be achieved locally, and are a great place to start to
 gain familiarity.

-**NOTE:** Linux builds should be done in Ubuntu on x86 architecture. ARM builds
-are created in x86. CentOS is not supported for building Chromium.
+**NOTE:** Linux builds should be done in Ubuntu on x64 architecture. ARM builds
+are created in x64 using cross-compiling. CentOS is not supported for building Chromium.

 1. Login to our GCP instance [here using your okta credentials](https://console.cloud.google.com/).
 2. Click the "Compute Engine" tab.
@@ -27,25 +27,32 @@ are created in x86. CentOS is not supported for building Chromium.
   - python2 (`python` must link to `python2`)
   - lsb_release
   - tmux is recommended in case your ssh session is interrupted
-6. Copy the entire `build_chromium` directory into a GCP storage bucket, so you can copy the scripts into the instance and run them.
+  - "Cloud API access scopes": must have **read / write** scope for the Storage API
+6. Copy the entire `build_chromium` directory from the `headless_shell_staging` bucket. To do this, use `gsutil rsync`:
+   ```sh
+   # This shows a preview of what would change by synchronizing the source scripts with the destination GCS bucket.
+   # Remove the `-n` flag to enact the changes
+   gsutil -m rsync -n -r x-pack/build_chromium gs://headless_shell_staging/build_chromium
+   ```

 ## Build Script Usage

-```
+These commands show how to set up an environment to build:
+```sh
 # Allow our scripts to use depot_tools commands
 export PATH=$HOME/chromium/depot_tools:$PATH

 # Create a dedicated working directory for this directory of Python scripts.
 mkdir ~/chromium && cd ~/chromium

-# Copy the scripts from the Kibana repo to use them conveniently in the working directory
-gsutil cp -r gs://my-bucket/build_chromium .
+# Copy the scripts from the Kibana team's GCS bucket
+gsutil cp -r gs://headless_shell_staging/build_chromium .

 # Install the OS packages, configure the environment, download the chromium source (25GB)
-python ./build_chromium/init.sh [arch_name]
+python ./build_chromium/init.py [arch_name]

 # Run the build script with the path to the chromium src directory, the git commit hash
-python ./build_chromium/build.py <commit_id> x86
+python ./build_chromium/build.py <commit_id> x64

 # OR You can build for ARM
 python ./build_chromium/build.py <commit_id> arm64
@@ -107,7 +114,7 @@ use the Kibana `build.py` script (in this directory).

 It's recommended that you create a working directory for the chromium source
 code and all the build tools, and run the commands from there:
-```
+```sh
 mkdir ~/chromium && cd ~/chromium
 cp -r ~/path/to/kibana/x-pack/build_chromium .
 python ./build_chromium/init.sh [arch_name]
@@ -216,6 +223,7 @@ In the case of Windows, you can use IE to open `http://localhost:9221` and see i

 The following links provide helpful context about how the Chromium build works, and its prerequisites:

+- Tools for Chromium version information: https://omahaproxy.appspot.com/
 - https://www.chromium.org/developers/how-tos/get-the-code/working-with-release-branches
 - https://chromium.googlesource.com/chromium/src/+/HEAD/docs/windows_build_instructions.md
 - https://chromium.googlesource.com/chromium/src/+/HEAD/docs/mac_build_instructions.md
@@ -3,9 +3,7 @@ from os import path
 from build_util import (
     runcmd,
     runcmdsilent,
     mkdir,
     md5_file,
-    configure_environment,
 )
-
 # This file builds Chromium headless on Windows, Mac, and Linux.
@@ -13,11 +11,10 @@ from build_util import (
 # Verify that we have an argument, and if not print instructions
 if (len(sys.argv) < 2):
     print('Usage:')
-    print('python build.py {chromium_version} [arch_name]')
+    print('python build.py {chromium_version} {arch_name}')
     print('Example:')
-    print('python build.py 68.0.3440.106')
-    print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479')
-    print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479 arm64 # build for ARM architecture')
+    print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479 x64')
+    print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479 arm64 # cross-compile for ARM architecture')
    print
    sys.exit(1)

@@ -57,25 +54,34 @@ if checked_out != 0:
     print('Creating a new branch for tracking the source version')
     runcmd('git checkout -b build-' + base_version + ' ' + source_version)

+# configure environment: environment path
 depot_tools_path = os.path.join(build_path, 'depot_tools')
-path_value = depot_tools_path + os.pathsep + os.environ['PATH']
-print('Updating PATH for depot_tools: ' + path_value)
-os.environ['PATH'] = path_value
+full_path = depot_tools_path + os.pathsep + os.environ['PATH']
+print('Updating PATH for depot_tools: ' + full_path)
+os.environ['PATH'] = full_path
+
+# configure environment: build dependencies
+if platform.system() == 'Linux':
+    if arch_name:
+        print('Running sysroot install script...')
+        runcmd(src_path + '/build/linux/sysroot_scripts/install-sysroot.py --arch=' + arch_name)
+    print('Running install-build-deps...')
+    runcmd(src_path + '/build/install-build-deps.sh')

 print('Updating all modules')
-runcmd('gclient sync')
+runcmd('gclient sync -D')

 print('Setting up build directory')
 runcmd('rm -rf out/headless')
-runcmd('mkdir out/headless')

 # Copy build args/{Linux | Darwin | Windows}.gn from the root of our directory to out/headless/args.gn,
+argsgn_destination = path.abspath('out/headless/args.gn')
+print('Generating platform-specific args')
+mkdir('out/headless')
+print(' > cp ' + argsgn_file + ' ' + argsgn_destination)
+shutil.copyfile(argsgn_file, argsgn_destination)

 # add the target_cpu for cross-compilation
 print('Adding target_cpu to args')
-
-argsgn_file_out = path.abspath('out/headless/args.gn')
-runcmd('cp ' + argsgn_file + ' ' + argsgn_file_out)
-runcmd('echo \'target_cpu="' + arch_name + '"\' >> ' + argsgn_file_out)
+f = open('out/headless/args.gn', 'a')
+f.write('\rtarget_cpu = "' + arch_name + '"\r')
+f.close()

 runcmd('gn gen out/headless')
@@ -136,3 +142,6 @@ archive.close()
 print('Creating ' + path.join(src_path, md5_filename))
 with open (md5_filename, 'w') as f:
     f.write(md5_file(zip_filename))
+
+runcmd('gsutil cp ' + path.join(src_path, zip_filename) + ' gs://headless_shell_staging')
+runcmd('gsutil cp ' + path.join(src_path, md5_filename) + ' gs://headless_shell_staging')
@@ -27,19 +27,3 @@ def md5_file(filename):
         for chunk in iter(lambda: f.read(128 * md5.block_size), b''):
             md5.update(chunk)
         return md5.hexdigest()
-
-def configure_environment(arch_name, build_path, src_path):
-    """Runs install scripts for deps, and configures temporary environment variables required by Chromium's build"""
-
-    if platform.system() == 'Linux':
-        if arch_name:
-            print('Running sysroot install script...')
-            sysroot_cmd = src_path + '/build/linux/sysroot_scripts/install-sysroot.py --arch=' + arch_name
-            runcmd(sysroot_cmd)
-        print('Running install-build-deps...')
-        runcmd(src_path + '/build/install-build-deps.sh')
-
-    depot_tools_path = os.path.join(build_path, 'depot_tools')
-    full_path = depot_tools_path + os.pathsep + os.environ['PATH']
-    print('Updating PATH for depot_tools: ' + full_path)
-    os.environ['PATH'] = full_path
@@ -19,8 +19,6 @@ use_alsa = false
 use_cups = false
 use_dbus = false
 use_gio = false
-# Please, consult @elastic/kibana-security before changing/removing this option.
-use_kerberos = false
 use_libpci = false
 use_pulseaudio = false
 use_udev = false

@@ -28,4 +26,8 @@ use_udev = false
 is_debug = false
 symbol_level = 0
 is_component_build = false
 remove_webcore_debug_symbols = true
+
+# Please, consult @elastic/kibana-security before changing/removing this option.
+use_kerberos = false
+
+# target_cpu is appended before build: "x64" or "arm64"
@@ -1,6 +1,6 @@
 import os, platform, sys
 from os import path
-from build_util import runcmd, mkdir, md5_file, configure_environment
+from build_util import runcmd, mkdir

 # This is a cross-platform initialization script which should only be run
 # once per environment, and isn't intended to be run directly. You should

@@ -44,6 +44,3 @@ if not path.isdir(chromium_dir):
     runcmd('fetch chromium --nohooks=1 --no-history=1')
 else:
     print('Directory exists: ' + chromium_dir + '. Skipping chromium fetch.')
-
-# This depends on having the chromium/src directory with the complete checkout
-configure_environment(arch_name, build_path, src_path)
@@ -2,7 +2,8 @@ import("//build/args/headless.gn")
 is_debug = false
 symbol_level = 0
 is_component_build = false
 remove_webcore_debug_symbols = true
 enable_nacl = false
+# Please, consult @elastic/kibana-security before changing/removing this option.
 use_kerberos = false

 # target_cpu is appended before build: "x64" or "arm64"
@@ -18,10 +18,12 @@ use_gio = false
 use_libpci = false
 use_pulseaudio = false
 use_udev = false
-# Please, consult @elastic/kibana-security before changing/removing this option.
-use_kerberos = false

 is_debug = false
 symbol_level = 0
 is_component_build = false
 remove_webcore_debug_symbols = true
+
+# Please, consult @elastic/kibana-security before changing/removing this option.
+use_kerberos = false
+
+# target_cpu is appended before build: "x64" or "arm64"
@@ -8,7 +8,7 @@
 import { i18n } from '@kbn/i18n';
 import { map, truncate } from 'lodash';
 import open from 'opn';
-import { ElementHandle, EvaluateFn, Page, Response, SerializableOrJSHandle } from 'puppeteer';
+import puppeteer, { ElementHandle, EvaluateFn, SerializableOrJSHandle } from 'puppeteer';
 import { parse as parseUrl } from 'url';
 import { getDisallowedOutgoingUrlError } from '../';
 import { ConditionalHeaders, ConditionalHeadersConditions } from '../../../export_types/common';
@@ -53,14 +53,14 @@ interface InterceptedRequest {
 const WAIT_FOR_DELAY_MS: number = 100;

 export class HeadlessChromiumDriver {
-  private readonly page: Page;
+  private readonly page: puppeteer.Page;
   private readonly inspect: boolean;
   private readonly networkPolicy: NetworkPolicy;

   private listenersAttached = false;
   private interceptedCount = 0;

-  constructor(page: Page, { inspect, networkPolicy }: ChromiumDriverOptions) {
+  constructor(page: puppeteer.Page, { inspect, networkPolicy }: ChromiumDriverOptions) {
     this.page = page;
     this.inspect = inspect;
     this.networkPolicy = networkPolicy;
@@ -127,7 +127,7 @@ export class HeadlessChromiumDriver {
   /*
    * Call Page.screenshot and return a base64-encoded string of the image
    */
-  public async screenshot(elementPosition: ElementPosition): Promise<string> {
+  public async screenshot(elementPosition: ElementPosition): Promise<string | void> {
     const { boundingClientRect, scroll } = elementPosition;
     const screenshot = await this.page.screenshot({
       clip: {
@@ -138,7 +138,10 @@ export class HeadlessChromiumDriver {
       },
     });

-    return screenshot.toString('base64');
+    if (screenshot) {
+      return screenshot.toString('base64');
+    }
+    return screenshot;
   }

   public async evaluate(
@@ -160,6 +163,11 @@ export class HeadlessChromiumDriver {
     const { timeout } = opts;
     logger.debug(`waitForSelector ${selector}`);
     const resp = await this.page.waitForSelector(selector, { timeout }); // override default 30000ms
+
+    if (!resp) {
+      throw new Error(`Failure in waitForSelector: void response! Context: ${context.context}`);
+    }
+
     logger.debug(`waitForSelector ${selector} resolved`);
     return resp;
   }
@@ -219,6 +227,7 @@ export class HeadlessChromiumDriver {
     }

     // @ts-ignore
+    // FIXME: use `await page.target().createCDPSession();`
     const client = this.page._client;

     // We have to reach into the Chrome Devtools Protocol to apply headers as using
@@ -293,7 +302,7 @@ export class HeadlessChromiumDriver {
     // Even though 3xx redirects go through our request
     // handler, we should probably inspect responses just to
     // avoid being bamboozled by some malicious request
-    this.page.on('response', (interceptedResponse: Response) => {
+    this.page.on('response', (interceptedResponse: puppeteer.Response) => {
       const interceptedUrl = interceptedResponse.url();
       const allowed = !interceptedUrl.startsWith('file://');

@@ -315,17 +324,17 @@ export class HeadlessChromiumDriver {

   private async launchDebugger() {
     // In order to pause on execution we have to reach more deeply into Chromiums Devtools Protocol,
-    // and more specifically, for the page being used. _client is per-page, and puppeteer doesn't expose
-    // a page's client in their api, so we have to reach into internals to get this behavior.
-    // Finally, in order to get the inspector running, we have to know the page's internal ID (again, private)
+    // and more specifically, for the page being used. _client is per-page.
+    // In order to get the inspector running, we have to know the page's internal ID (again, private)
     // in order to construct the final debugging URL.

-    const target = this.page.target();
-    const client = await target.createCDPSession();
-
-    await client.send('Debugger.enable');
-    await client.send('Debugger.pause');
-    const targetId = target._targetId;
+    // @ts-ignore
+    await this.page._client.send('Debugger.enable');
+    // @ts-ignore
+    await this.page._client.send('Debugger.pause');
+    // @ts-ignore
+    const targetId = this.page._target._targetId;
     const wsEndpoint = this.page.browser().wsEndpoint();
     const { port } = parseUrl(wsEndpoint);

@@ -193,6 +193,10 @@ export class HeadlessChromiumDriverFactory {
       // Puppeteer doesn't give a handle to the original ChildProcess object
       // See https://github.com/GoogleChrome/puppeteer/issues/1292#issuecomment-521470627

+      if (childProcess == null) {
+        throw new TypeError('childProcess is null or undefined!');
+      }
+
       // just log closing of the process
       const processClose$ = Rx.fromEvent<void>(childProcess, 'close').pipe(
         tap(() => {
@@ -10,16 +10,17 @@ import { spawn } from 'child_process';
 import del from 'del';
 import { mkdtempSync } from 'fs';
 import { uniq } from 'lodash';
-import { tmpdir } from 'os';
+import os, { tmpdir } from 'os';
 import { join } from 'path';
 import { createInterface } from 'readline';
-import { fromEvent, timer, merge, of } from 'rxjs';
-import { takeUntil, map, reduce, tap, catchError } from 'rxjs/operators';
-import { ReportingCore } from '../../..';
+import { fromEvent, merge, of, timer } from 'rxjs';
+import { catchError, map, reduce, takeUntil, tap } from 'rxjs/operators';
+import { ReportingCore } from '../../../';
 import { LevelLogger } from '../../../lib';
-import { getBinaryPath } from '../../install';
+import { ChromiumArchivePaths } from '../paths';
 import { args } from './args';

+const paths = new ChromiumArchivePaths();
 const browserLaunchTimeToWait = 5 * 1000;

 // Default args used by pptr

@@ -61,7 +62,15 @@ export const browserStartLogs = (
   const proxy = config.get('capture', 'browser', 'chromium', 'proxy');
   const disableSandbox = config.get('capture', 'browser', 'chromium', 'disableSandbox');
   const userDataDir = mkdtempSync(join(tmpdir(), 'chromium-'));
-  const binaryPath = getBinaryPath();
+
+  const platform = process.platform;
+  const architecture = os.arch();
+  const pkg = paths.find(platform, architecture);
+  if (!pkg) {
+    throw new Error(`Unsupported platform: ${platform}-${architecture}`);
+  }
+  const binaryPath = paths.getBinaryPath(pkg);
+
   const kbnArgs = args({
     userDataDir,
     viewport: { width: 800, height: 600 },
@@ -10,10 +10,10 @@ import { BrowserDownload } from '../';
 import { CaptureConfig } from '../../../server/types';
 import { LevelLogger } from '../../lib';
 import { HeadlessChromiumDriverFactory } from './driver_factory';
-import { paths } from './paths';
+import { ChromiumArchivePaths } from './paths';

 export const chromium: BrowserDownload = {
-  paths,
+  paths: new ChromiumArchivePaths(),
   createDriverFactory: (binaryPath: string, captureConfig: CaptureConfig, logger: LevelLogger) =>
     new HeadlessChromiumDriverFactory(binaryPath, captureConfig, logger),
 };

@@ -32,3 +32,5 @@ export const getDisallowedOutgoingUrlError = (interceptedUrl: string) =>
     values: { interceptedUrl },
   })
 );
+
+export { ChromiumArchivePaths };
@@ -7,12 +7,24 @@

 import path from 'path';

-export const paths = {
-  archivesPath: path.resolve(__dirname, '../../../../../../.chromium'),
-  baseUrl: 'https://storage.googleapis.com/headless_shell/',
-  packages: [
+interface PackageInfo {
+  platform: string;
+  architecture: string;
+  archiveFilename: string;
+  archiveChecksum: string;
+  binaryChecksum: string;
+  binaryRelativePath: string;
+}
+
+// We download zip files from a Kibana team GCS bucket named `headless_shell`
+enum BaseUrl {
+  custom = 'https://storage.googleapis.com/headless_shell',
+}
+
+export class ChromiumArchivePaths {
+  public readonly packages: PackageInfo[] = [
     {
-      platforms: ['darwin', 'freebsd', 'openbsd'],
+      platform: 'darwin',
       architecture: 'x64',
       archiveFilename: 'chromium-ef768c9-darwin_x64.zip',
       archiveChecksum: 'd87287f6b2159cff7c64babac873cc73',

@@ -20,7 +32,7 @@ export const paths = {
       binaryRelativePath: 'headless_shell-darwin_x64/headless_shell',
     },
     {
-      platforms: ['linux'],
+      platform: 'linux',
       architecture: 'x64',
       archiveFilename: 'chromium-ef768c9-linux_x64.zip',
       archiveChecksum: '85575e8fd56849f4de5e3584e05712c0',

@@ -28,7 +40,7 @@ export const paths = {
       binaryRelativePath: 'headless_shell-linux_x64/headless_shell',
     },
     {
-      platforms: ['linux'],
+      platform: 'linux',
       architecture: 'arm64',
       archiveFilename: 'chromium-ef768c9-linux_arm64.zip',
       archiveChecksum: '20b09b70476bea76a276c583bf72eac7',

@@ -36,12 +48,36 @@ export const paths = {
       binaryRelativePath: 'headless_shell-linux_arm64/headless_shell',
     },
     {
-      platforms: ['win32'],
+      platform: 'win32',
       architecture: 'x64',
       archiveFilename: 'chromium-ef768c9-windows_x64.zip',
       archiveChecksum: '33301c749b5305b65311742578c52f15',
       binaryChecksum: '9f28dd56c7a304a22bf66f0097fa4de9',
       binaryRelativePath: 'headless_shell-windows_x64\\headless_shell.exe',
     },
-  ],
-};
+  ];
+
+  // zip files get downloaded to a .chromium directory in the kibana root
+  public readonly archivesPath = path.resolve(__dirname, '../../../../../../.chromium');
+
+  public find(platform: string, architecture: string) {
+    return this.packages.find((p) => p.platform === platform && p.architecture === architecture);
+  }
+
+  public resolvePath(p: PackageInfo) {
+    return path.resolve(this.archivesPath, p.archiveFilename);
+  }
+
+  public getAllArchiveFilenames(): string[] {
+    return this.packages.map((p) => this.resolvePath(p));
+  }
+
+  public getDownloadUrl(p: PackageInfo) {
+    return BaseUrl.custom + `/${p.archiveFilename}`;
+  }
+
+  public getBinaryPath(p: PackageInfo) {
+    const chromiumPath = path.resolve(__dirname, '../../../chromium');
+    return path.join(chromiumPath, p.binaryRelativePath);
+  }
+}
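Taken together, `ChromiumArchivePaths` replaces the loose `paths` object with a single lookup surface keyed on `platform` + `architecture`. A short usage sketch, mirroring what `install.ts` and the driver factory do after this change:

```ts
import os from 'os';
import { ChromiumArchivePaths } from './paths';

const paths = new ChromiumArchivePaths();

// Select the package for the current machine; `undefined` means unsupported.
const pkg = paths.find(process.platform, os.arch());
if (!pkg) {
  throw new Error(`Unsupported platform: ${process.platform}-${os.arch()}`);
}

const archive = paths.resolvePath(pkg); // where the zip lands under .chromium
const url = paths.getDownloadUrl(pkg); // headless_shell GCS bucket URL
const binary = paths.getBinaryPath(pkg); // extracted headless_shell executable
```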
@@ -7,8 +7,13 @@

 import { createHash } from 'crypto';
 import { createReadStream } from 'fs';
+import { Readable } from 'stream';

-import { readableEnd } from './util';
+function readableEnd(stream: Readable) {
+  return new Promise((resolve, reject) => {
+    stream.on('error', reject).on('end', resolve);
+  });
+}

 export async function md5(path: string) {
   const hash = createHash('md5');
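The hunk cuts off inside `md5()`. Based on the imports kept above (`createHash`, `createReadStream`) and the inlined `readableEnd`, the helper plausibly finishes along these lines (a sketch, not the verbatim file):

```ts
export async function md5(path: string) {
  const hash = createHash('md5');
  await readableEnd(
    // Hash each chunk as it streams off disk; resolve once the stream ends.
    createReadStream(path).on('data', (chunk) => hash.update(chunk))
  );
  return hash.digest('hex');
}
```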
@@ -9,7 +9,6 @@ import del from 'del';
 import { readdirSync } from 'fs';
 import { resolve as resolvePath } from 'path';
 import { GenericLevelLogger } from '../../lib/level_logger';
-import { asyncMap } from './util';

 /**
  * Delete any file in the `dir` that is not in the expectedPaths

@@ -17,7 +16,7 @@ import { asyncMap } from './util';
 export async function clean(dir: string, expectedPaths: string[], logger: GenericLevelLogger) {
   let filenames: string[];
   try {
-    filenames = await readdirSync(dir);
+    filenames = readdirSync(dir);
   } catch (error) {
     if (error.code === 'ENOENT') {
       // directory doesn't exist, that's as clean as it gets

@@ -27,11 +26,13 @@ export async function clean(dir: string, expectedPaths: string[], logger: Generi
     throw error;
   }

-  await asyncMap(filenames, async (filename) => {
-    const path = resolvePath(dir, filename);
-    if (!expectedPaths.includes(path)) {
-      logger.warning(`Deleting unexpected file ${path}`);
-      await del(path, { force: true });
-    }
-  });
+  await Promise.all(
+    filenames.map(async (filename) => {
+      const path = resolvePath(dir, filename);
+      if (!expectedPaths.includes(path)) {
+        logger.warning(`Deleting unexpected file ${path}`);
+        await del(path, { force: true });
+      }
+    })
+  );
 }
@@ -13,11 +13,12 @@ import { GenericLevelLogger } from '../../lib/level_logger';

 /**
  * Download a url and calculate it's checksum
  * @param {String} url
  * @param {String} path
  * @return {Promise<String>} checksum of the downloaded file
  */
-export async function download(url: string, path: string, logger: GenericLevelLogger) {
+export async function download(
+  url: string,
+  path: string,
+  logger: GenericLevelLogger
+): Promise<string> {
   logger.info(`Downloading ${url} to ${path}`);

   const hash = createHash('md5');
@@ -6,13 +6,11 @@
 */

 import { existsSync } from 'fs';
-import { resolve as resolvePath } from 'path';
 import { BrowserDownload, chromium } from '../';
 import { GenericLevelLogger } from '../../lib/level_logger';
 import { md5 } from './checksum';
 import { clean } from './clean';
 import { download } from './download';
-import { asyncMap } from './util';

 /**
  * Check for the downloaded archive of each requested browser type and

@@ -31,39 +29,46 @@ export async function ensureBrowserDownloaded(logger: GenericLevelLogger) {
  * @return {Promise<undefined>}
  */
 async function ensureDownloaded(browsers: BrowserDownload[], logger: GenericLevelLogger) {
-  await asyncMap(browsers, async (browser) => {
-    const { archivesPath } = browser.paths;
-
-    await clean(
-      archivesPath,
-      browser.paths.packages.map((p) => resolvePath(archivesPath, p.archiveFilename)),
-      logger
-    );
-
-    const invalidChecksums: string[] = [];
-    await asyncMap(browser.paths.packages, async ({ archiveFilename, archiveChecksum }) => {
-      const url = `${browser.paths.baseUrl}${archiveFilename}`;
-      const path = resolvePath(archivesPath, archiveFilename);
-
-      if (existsSync(path) && (await md5(path)) === archiveChecksum) {
-        logger.debug(`Browser archive exists in ${path}`);
-        return;
-      }
-
-      const downloadedChecksum = await download(url, path, logger);
-      if (downloadedChecksum !== archiveChecksum) {
-        invalidChecksums.push(`${url} => ${path}`);
-      }
-    });
-
-    if (invalidChecksums.length) {
-      const err = new Error(
-        `Error downloading browsers, checksums incorrect for:\n - ${invalidChecksums.join(
-          '\n - '
-        )}`
-      );
-      logger.error(err);
-      throw err;
-    }
-  });
+  await Promise.all(
+    browsers.map(async ({ paths: pSet }) => {
+      await clean(pSet.archivesPath, pSet.getAllArchiveFilenames(), logger);
+
+      const invalidChecksums: string[] = [];
+      await Promise.all(
+        pSet.packages.map(async (p) => {
+          const { archiveFilename, archiveChecksum } = p;
+          if (archiveFilename && archiveChecksum) {
+            const path = pSet.resolvePath(p);
+
+            if (existsSync(path) && (await md5(path)) === archiveChecksum) {
+              logger.debug(`Browser archive exists in ${path}`);
+              return;
+            }
+
+            const url = pSet.getDownloadUrl(p);
+            try {
+              const downloadedChecksum = await download(url, path, logger);
+              if (downloadedChecksum !== archiveChecksum) {
+                invalidChecksums.push(`${url} => ${path}`);
+              }
+            } catch (err) {
+              const message = new Error(`Failed to download ${url}`);
+              logger.error(err);
+              throw message;
+            }
+          }
+        })
+      );
+
+      if (invalidChecksums.length) {
+        const err = new Error(
+          `Error downloading browsers, checksums incorrect for:\n - ${invalidChecksums.join(
+            '\n - '
+          )}`
+        );
+        logger.error(err);
+        throw err;
+      }
+    })
+  );
 }
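Stripped of the Reporting types, the restructured flow is: checksum any archive already on disk, download only on mismatch or absence, and fail the whole batch if any downloaded checksum disagrees. A condensed sketch with a hypothetical `Archive` shape (not the real `PackageInfo`):

```ts
import { existsSync } from 'fs';

interface Archive {
  url: string;
  path: string;
  checksum: string;
}

// Returns descriptors of archives whose downloaded checksum did not match.
async function verifyOrDownload(
  archives: Archive[],
  md5: (path: string) => Promise<string>,
  download: (url: string, path: string) => Promise<string>
): Promise<string[]> {
  const invalid: string[] = [];
  await Promise.all(
    archives.map(async ({ url, path, checksum }) => {
      if (existsSync(path) && (await md5(path)) === checksum) {
        return; // cached archive is intact
      }
      if ((await download(url, path)) !== checksum) {
        invalid.push(`${url} => ${path}`);
      }
    })
  );
  return invalid;
}
```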
@@ -1,24 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { Readable } from 'stream';
-
-/**
- * Iterate an array asynchronously and in parallel
- */
-export function asyncMap<T, T2>(array: T[], asyncFn: (x: T) => T2): Promise<T2[]> {
-  return Promise.all(array.map(asyncFn));
-}
-
-/**
- * Wait for a readable stream to end
- */
-export function readableEnd(stream: Readable) {
-  return new Promise((resolve, reject) => {
-    stream.on('error', reject).on('end', resolve);
-  });
-}
@@ -7,17 +7,17 @@

 import fs from 'fs';
 import crypto from 'crypto';
-import { resolve } from 'path';
+import { resolve as pathResolve } from 'path';

 import { extract } from './extract';
 import { ExtractError } from './extract_error';
 import { promisify } from 'util';

-const FIXTURES_FOLDER = resolve(__dirname, '__fixtures__');
-const SRC_FILE_UNCOMPRESSED = resolve(FIXTURES_FOLDER, 'file.md');
+const FIXTURES_FOLDER = pathResolve(__dirname, '__fixtures__');
+const SRC_FILE_UNCOMPRESSED = pathResolve(FIXTURES_FOLDER, 'file.md');
 const SRC_FILE_COMPRESSED_ZIP = `${SRC_FILE_UNCOMPRESSED}.zip`;
-const EXTRACT_TARGET_FOLDER = resolve(FIXTURES_FOLDER, 'extract_target');
-const EXTRACT_TARGET_FILE = resolve(EXTRACT_TARGET_FOLDER, 'file.md');
+const EXTRACT_TARGET_FOLDER = pathResolve(FIXTURES_FOLDER, 'extract_target');
+const EXTRACT_TARGET_FILE = pathResolve(EXTRACT_TARGET_FOLDER, 'file.md');

 const fsp = {
   mkdir: promisify(fs.mkdir),

@@ -25,7 +25,7 @@ const fsp = {
   unlink: promisify(fs.unlink),
 };

-const ignoreErrorCodes = async (codes, promise) => {
+const ignoreErrorCodes = async (codes: string[], promise: Promise<void>) => {
   try {
     await promise;
   } catch (err) {

@@ -40,7 +40,7 @@ async function cleanup() {
   await ignoreErrorCodes(['ENOENT'], fsp.rmdir(EXTRACT_TARGET_FOLDER));
 }

-function fileHash(filepath) {
+function fileHash(filepath: string) {
   return new Promise((resolve, reject) => {
     const hash = crypto.createHash('sha256');
     const input = fs.createReadStream(filepath);
@@ -9,7 +9,7 @@ import path from 'path';
 import { unzip } from './unzip';
 import { ExtractError } from './extract_error';

-export async function extract(archivePath, targetPath) {
+export async function extract(archivePath: string, targetPath: string) {
   const fileType = path.parse(archivePath).ext.substr(1);
   let unpacker;
@@ -6,7 +6,8 @@
 */

 export class ExtractError extends Error {
-  constructor(cause, message = 'Failed to extract the browser archive') {
+  public readonly cause: string;
+  constructor(cause: string, message = 'Failed to extract the browser archive') {
     super(message);
     this.message = message;
     this.name = this.constructor.name;
@@ -8,7 +8,7 @@
 import extractZip from 'extract-zip';
 import { ExtractError } from './extract_error';

-export async function unzip(filepath, target) {
+export async function unzip(filepath: string, target: string) {
   try {
     await extractZip(filepath, { dir: target });
   } catch (err) {
@@ -6,16 +6,16 @@
 */

 import { first } from 'rxjs/operators';
-import { ReportingConfig } from '../';
 import { LevelLogger } from '../lib';
 import { CaptureConfig } from '../types';
-import { chromium } from './chromium';
+import { chromium, ChromiumArchivePaths } from './chromium';
 import { HeadlessChromiumDriverFactory } from './chromium/driver_factory';
 import { installBrowser } from './install';
+import { ReportingConfig } from '..';

-export { chromium } from './chromium';
 export { HeadlessChromiumDriver } from './chromium/driver';
 export { HeadlessChromiumDriverFactory } from './chromium/driver_factory';
+export { chromium } from './chromium';

 type CreateDriverFactory = (
   binaryPath: string,

@@ -25,17 +25,7 @@ type CreateDriverFactory = (

 export interface BrowserDownload {
   createDriverFactory: CreateDriverFactory;
-  paths: {
-    archivesPath: string;
-    baseUrl: string;
-    packages: Array<{
-      archiveChecksum: string;
-      archiveFilename: string;
-      binaryChecksum: string;
-      binaryRelativePath: string;
-      platforms: string[];
-    }>;
-  };
+  paths: ChromiumArchivePaths;
 }

 export const initializeBrowserDriverFactory = async (
@@ -10,38 +10,11 @@ import os from 'os';
 import path from 'path';
 import * as Rx from 'rxjs';
 import { GenericLevelLogger } from '../lib/level_logger';
-import { paths } from './chromium/paths';
+import { ChromiumArchivePaths } from './chromium';
 import { ensureBrowserDownloaded } from './download';
-// @ts-ignore
 import { md5 } from './download/checksum';
-// @ts-ignore
 import { extract } from './extract';

-interface Package {
-  platforms: string[];
-  architecture: string;
-}
-
-/**
- * Small helper util to resolve where chromium is installed
- */
-export const getBinaryPath = (
-  chromiumPath: string = path.resolve(__dirname, '../../chromium'),
-  platform: string = process.platform,
-  architecture: string = os.arch()
-) => {
-  const pkg = paths.packages.find((p: Package) => {
-    return p.platforms.includes(platform) && p.architecture === architecture;
-  });
-
-  if (!pkg) {
-    // TODO: validate this
-    throw new Error(`Unsupported platform: ${platform}-${architecture}`);
-  }
-
-  return path.join(chromiumPath, pkg.binaryRelativePath);
-};
-
 /**
  * "install" a browser by type into installs path by extracting the downloaded
  * archive. If there is an error extracting the archive an `ExtractError` is thrown

@@ -53,17 +26,16 @@ export function installBrowser(
   architecture: string = os.arch()
 ): { binaryPath$: Rx.Subject<string> } {
   const binaryPath$ = new Rx.Subject<string>();

+  const paths = new ChromiumArchivePaths();
+  const pkg = paths.find(platform, architecture);
+
+  if (!pkg) {
+    throw new Error(`Unsupported platform: ${platform}-${architecture}`);
+  }
+
   const backgroundInstall = async () => {
-    const pkg = paths.packages.find((p: Package) => {
-      return p.platforms.includes(platform) && p.architecture === architecture;
-    });
-
-    if (!pkg) {
-      // TODO: validate this
-      throw new Error(`Unsupported platform: ${platform}-${architecture}`);
-    }
-
-    const binaryPath = getBinaryPath(chromiumPath, platform, architecture);
+    const binaryPath = paths.getBinaryPath(pkg);
     const binaryChecksum = await md5(binaryPath).catch(() => '');

     if (binaryChecksum !== pkg.binaryChecksum) {
@@ -80,6 +80,10 @@ export const getScreenshots = async (
       await resizeToClipArea(item, browser, layout.getBrowserZoom(), logger);
       const base64EncodedData = await browser.screenshot(item.position);

+      if (!base64EncodedData) {
+        throw new Error(`Failure in getScreenshots! Base64 data is void`);
+      }
+
       screenshots.push({
         base64EncodedData,
         title: item.attributes.title,