[build] Creates Linux aarch64 archive (#69165) (#71358)

- Updates Linux Chromium builds to accept an architecture argument (defaults to x64) for arm64 support.
  - Example: `python ~/chromium/build_chromium/build.py 312d84c8ce62810976feda0d3457108a6dfff9e6 arm64`
- Updates all Chromium builds to include the architecture in the filename (see the naming sketch below).
  - `chromium-312d84c-linux_arm64.zip` _(new)_
  - `chromium-312d84c-linux.zip` > `chromium-312d84c-linux_x64.zip`
- Moves Chromium install from data directory to `x-pack/plugins/reporting/chromium`
- Moves Chromium download cache from `x-pack/plugins/reporting/.chromium` to `.chromium`
- Installs Chromium during build (closes #53664)
- Updates the build to be architecture-aware (x64 and aarch64)
- Removes Chromium debug logs; they were not helpful and cannot be written inside the Kibana root. If we were to keep them, we would need to write to `logging.dest`.
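
A minimal sketch (not part of this change) of the new naming convention, using a hypothetical `chromiumArchiveName` helper; the format matches the examples above and the `build.py` output:

```ts
// Hypothetical helper illustrating the archive naming scheme:
// chromium-<first 7 chars of the Chromium SHA>-<platform>_<arch>.zip
function chromiumArchiveName(sha: string, platform: string, arch: string): string {
  return `chromium-${sha.slice(0, 7)}-${platform}_${arch}.zip`;
}

// chromiumArchiveName('312d84c8ce62810976feda0d3457108a6dfff9e6', 'linux', 'arm64')
// => 'chromium-312d84c-linux_arm64.zip'
```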

Signed-off-by: Tyler Smalley <tyler.smalley@elastic.co>
# Conflicts:
#	.ci/packer_cache_for_branch.sh
#	x-pack/plugins/reporting/server/browsers/chromium/paths.ts

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
Authored by Tyler Smalley on 2020-07-13 15:37:29 -07:00, committed by GitHub.
parent 3c7a9e1bfb
commit 1c8107bfd7
34 changed files with 218 additions and 254 deletions


@ -1,6 +1,7 @@
**/*.js.snap
**/graphql/types.ts
/.es
/.chromium
/build
/built_assets
/config/apm.dev.js

.gitignore

@ -2,6 +2,7 @@
.signing-config.json
.ackrc
/.es
/.chromium
.DS_Store
.node_binaries
.native_modules


@ -19,7 +19,7 @@ image::user/reporting/images/share-button.png["Share"]
[float]
== Setup
{reporting} is automatically enabled in {kib}. The first time {kib} runs, it extracts a custom build for the Chromium web browser, which
{reporting} is automatically enabled in {kib}. It runs a custom build of the Chromium web browser, which
runs on the server in headless mode to load {kib} and capture the rendered {kib} charts as images.
Chromium is an open-source project not related to Elastic, but the Chromium binary for {kib} has been custom-built by Elastic to ensure it


@ -261,7 +261,7 @@ export class ClusterManager {
/debug\.log$/,
...pluginInternalDirsIgnore,
fromRoot('src/legacy/server/sass/__tmp__'),
fromRoot('x-pack/plugins/reporting/.chromium'),
fromRoot('x-pack/plugins/reporting/chromium'),
fromRoot('x-pack/plugins/security_solution/cypress'),
fromRoot('x-pack/plugins/apm/e2e'),
fromRoot('x-pack/plugins/apm/scripts'),


@ -24,7 +24,7 @@ The majority of this logic is extracted from the grunt build that has existed fo
**Config**: [lib/config.js] defines the config used to execute tasks. It is mostly used to determine absolute paths to specific locations, and to get access to the Platforms.
**Platform**: [lib/platform.js] defines the Platform objects, which define the different platforms we build for. Use `config.getTargetPlatforms()` to get the list of platforms we are targeting in this build, `config.getNodePlatforms()` to get the list of platform we will download node for, or `config.getLinux/Windows/MacPlatform()` to get a specific platform.
**Platform**: [lib/platform.js] defines the Platform objects, which define the different platforms we build for. Use `config.getTargetPlatforms()` to get the list of platforms we are targeting in this build, `config.getNodePlatforms()` to get the list of platforms we will download node for, or `config.getPlatform()` to get a specific platform and architecture (see the usage sketch below).
**Log**: We use the `ToolingLog` defined in [../tooling_log/tooling_log.js]
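
A rough usage sketch of the API described above; `config` is the object returned by `getConfig()`, and the return values follow the platform/architecture changes in this PR:

```ts
// Hypothetical usage; `config` comes from getConfig() in lib/config.js.
const linuxArm = config.getPlatform('linux', 'arm64');
linuxArm.getName();      // 'linux'
linuxArm.getNodeArch();  // 'linux-arm64'
linuxArm.getBuildName(); // 'linux-aarch64'

// Unknown platform/architecture combinations throw:
config.getPlatform('linux', 'foo'); // Error: Unable to find platform (linux) with architecture (foo)
```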


@ -20,16 +20,16 @@
import { getConfig, createRunner } from './lib';
import {
BuildKibanaPlatformPluginsTask,
BuildPackagesTask,
CleanClientModulesOnDLLTask,
CleanEmptyFoldersTask,
CleanExtraBinScriptsTask,
CleanExtraBrowsersTask,
CleanExtraFilesFromModulesTask,
CleanPackagesTask,
CleanTypescriptTask,
CleanNodeBuildsTask,
CleanPackagesTask,
CleanTask,
CleanTypescriptTask,
CopyBinScriptsTask,
CopySourceTask,
CreateArchivesSourcesTask,
@ -44,20 +44,20 @@ import {
CreateRpmPackageTask,
DownloadNodeBuildsTask,
ExtractNodeBuildsTask,
InstallChromiumTask,
InstallDependenciesTask,
BuildKibanaPlatformPluginsTask,
OptimizeBuildTask,
PatchNativeModulesTask,
PathLengthTask,
RemovePackageJsonDepsTask,
RemoveWorkspacesTask,
TranspileBabelTask,
TranspileScssTask,
UpdateLicenseFileTask,
UuidVerificationTask,
VerifyEnvTask,
VerifyExistingNodeBuildsTask,
PathLengthTask,
WriteShaSumsTask,
UuidVerificationTask,
} from './tasks';
export async function buildDistributables(options) {
@ -134,12 +134,12 @@ export async function buildDistributables(options) {
/**
* copy generic build outputs into platform-specific build
* directories and perform platform-specific steps
* directories and perform platform/architecture-specific steps
*/
await run(CreateArchivesSourcesTask);
await run(PatchNativeModulesTask);
await run(InstallChromiumTask);
await run(CleanExtraBinScriptsTask);
await run(CleanExtraBrowsersTask);
await run(CleanNodeBuildsTask);
await run(PathLengthTask);


@ -72,15 +72,31 @@ describe('dev/build/lib/config', () => {
});
});
describe('#getPlatform()', () => {
it('throws error when platform does not exist', async () => {
const { config } = await setup();
const fn = () => config.getPlatform('foo', 'x64');
expect(fn).to.throwException(/Unable to find platform/);
});
it('throws error when architecture does not exist', async () => {
const { config } = await setup();
const fn = () => config.getPlatform('linux', 'foo');
expect(fn).to.throwException(/Unable to find platform/);
});
});
describe('#getTargetPlatforms()', () => {
it('returns an array of all platform objects', async () => {
const { config } = await setup();
expect(
config
.getTargetPlatforms()
.map((p) => p.getName())
.map((p) => p.getNodeArch())
.sort()
).to.eql(['darwin', 'linux', 'windows']);
).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
});
it('returns just this platform when targetAllPlatforms = false', async () => {
@ -99,9 +115,9 @@ describe('dev/build/lib/config', () => {
expect(
config
.getTargetPlatforms()
.map((p) => p.getName())
.map((p) => p.getNodeArch())
.sort()
).to.eql(['darwin', 'linux', 'windows']);
).to.eql(['darwin-x64', 'linux-arm64', 'linux-x64', 'win32-x64']);
});
it('returns this platform and linux, when targetAllPlatforms = false', async () => {
@ -111,39 +127,20 @@ describe('dev/build/lib/config', () => {
if (process.platform !== 'linux') {
expect(platforms).to.have.length(2);
expect(platforms[0]).to.be(config.getPlatformForThisOs());
expect(platforms[1]).to.be(config.getLinuxPlatform());
expect(platforms[1]).to.be(config.getPlatform('linux', 'x64'));
} else {
expect(platforms).to.have.length(1);
expect(platforms[0]).to.be(config.getLinuxPlatform());
expect(platforms[0]).to.be(config.getPlatform('linux', 'x64'));
}
});
});
describe('#getLinuxPlatform()', () => {
it('returns the linux platform', async () => {
const { config } = await setup();
expect(config.getLinuxPlatform().getName()).to.be('linux');
});
});
describe('#getWindowsPlatform()', () => {
it('returns the windows platform', async () => {
const { config } = await setup();
expect(config.getWindowsPlatform().getName()).to.be('windows');
});
});
describe('#getMacPlatform()', () => {
it('returns the mac platform', async () => {
const { config } = await setup();
expect(config.getMacPlatform().getName()).to.be('darwin');
});
});
describe('#getPlatformForThisOs()', () => {
it('returns the platform that matches the arch of this machine', async () => {
const { config } = await setup();
expect(config.getPlatformForThisOs().getName()).to.be(process.platform);
const currentPlatform = config.getPlatformForThisOs();
expect(currentPlatform.getName()).to.be(process.platform);
expect(currentPlatform.getArchitecture()).to.be(process.arch);
});
});


@ -30,37 +30,39 @@ describe('src/dev/build/lib/platform', () => {
describe('getNodeArch()', () => {
it('returns the node arch for the passed name', () => {
expect(createPlatform('windows').getNodeArch()).to.be('windows-x64');
expect(createPlatform('win32', 'x64').getNodeArch()).to.be('win32-x64');
});
});
describe('getBuildName()', () => {
it('returns the build name for the passed name', () => {
expect(createPlatform('windows').getBuildName()).to.be('windows-x86_64');
expect(createPlatform('linux', 'arm64', 'linux-aarch64').getBuildName()).to.be(
'linux-aarch64'
);
});
});
describe('isWindows()', () => {
it('returns true if name is windows', () => {
expect(createPlatform('windows').isWindows()).to.be(true);
expect(createPlatform('linux').isWindows()).to.be(false);
expect(createPlatform('darwin').isWindows()).to.be(false);
it('returns true if name is win32', () => {
expect(createPlatform('win32', 'x64').isWindows()).to.be(true);
expect(createPlatform('linux', 'x64').isWindows()).to.be(false);
expect(createPlatform('darwin', 'x64').isWindows()).to.be(false);
});
});
describe('isLinux()', () => {
it('returns true if name is linux', () => {
expect(createPlatform('windows').isLinux()).to.be(false);
expect(createPlatform('linux').isLinux()).to.be(true);
expect(createPlatform('darwin').isLinux()).to.be(false);
expect(createPlatform('win32', 'x64').isLinux()).to.be(false);
expect(createPlatform('linux', 'x64').isLinux()).to.be(true);
expect(createPlatform('darwin', 'x64').isLinux()).to.be(false);
});
});
describe('isMac()', () => {
it('returns true if name is darwin', () => {
expect(createPlatform('windows').isMac()).to.be(false);
expect(createPlatform('linux').isMac()).to.be(false);
expect(createPlatform('darwin').isMac()).to.be(true);
expect(createPlatform('win32', 'x64').isMac()).to.be(false);
expect(createPlatform('linux', 'x64').isMac()).to.be(false);
expect(createPlatform('darwin', 'x64').isMac()).to.be(true);
});
});
});


@ -18,7 +18,7 @@
*/
import { dirname, resolve, relative } from 'path';
import { platform as getOsPlatform } from 'os';
import os from 'os';
import { getVersionInfo } from './version_info';
import { createPlatform } from './platform';
@ -29,7 +29,12 @@ export async function getConfig({ isRelease, targetAllPlatforms, versionQualifie
const repoRoot = dirname(pkgPath);
const nodeVersion = pkg.engines.node;
const platforms = ['darwin', 'linux', 'windows'].map(createPlatform);
const platforms = [
createPlatform('linux', 'x64', 'linux-x86_64'),
createPlatform('linux', 'arm64', 'linux-aarch64'),
createPlatform('darwin', 'x64', 'darwin-x86_64'),
createPlatform('win32', 'x64', 'windows-x86_64'),
];
const versionInfo = await getVersionInfo({
isRelease,
@ -101,34 +106,22 @@ export async function getConfig({ isRelease, targetAllPlatforms, versionQualifie
}
if (process.platform === 'linux') {
return [this.getLinuxPlatform()];
return [this.getPlatform('linux', 'x64')];
}
return [this.getPlatformForThisOs(), this.getLinuxPlatform()];
return [this.getPlatformForThisOs(), this.getPlatform('linux', 'x64')];
}
/**
* Get the linux platform object
* @return {Platform}
*/
getLinuxPlatform() {
return platforms.find((p) => p.isLinux());
}
getPlatform(name, arch) {
const selected = platforms.find((p) => {
return name === p.getName() && arch === p.getArchitecture();
});
/**
* Get the windows platform object
* @return {Platform}
*/
getWindowsPlatform() {
return platforms.find((p) => p.isWindows());
}
if (!selected) {
throw new Error(`Unable to find platform (${name}) with architecture (${arch})`);
}
/**
* Get the mac platform object
* @return {Platform}
*/
getMacPlatform() {
return platforms.find((p) => p.isMac());
return selected;
}
/**
@ -136,16 +129,7 @@ export async function getConfig({ isRelease, targetAllPlatforms, versionQualifie
* @return {Platform}
*/
getPlatformForThisOs() {
switch (getOsPlatform()) {
case 'darwin':
return this.getMacPlatform();
case 'win32':
return this.getWindowsPlatform();
case 'linux':
return this.getLinuxPlatform();
default:
throw new Error(`Unable to find platform for this os`);
}
return this.getPlatform(os.platform(), os.arch());
}
/**


@ -17,22 +17,26 @@
* under the License.
*/
export function createPlatform(name) {
export function createPlatform(name, architecture, buildName) {
return new (class Platform {
getName() {
return name;
}
getNodeArch() {
return `${name}-x64`;
getArchitecture() {
return architecture;
}
getBuildName() {
return `${name}-x86_64`;
return buildName;
}
getNodeArch() {
return `${name}-${architecture}`;
}
isWindows() {
return name === 'windows';
return name === 'win32';
}
isMac() {


@ -201,45 +201,6 @@ export const CleanExtraBinScriptsTask = {
},
};
export const CleanExtraBrowsersTask = {
description: 'Cleaning extra browsers from platform-specific builds',
async run(config, log, build) {
const getBrowserPathsForPlatform = (platform) => {
const reportingDir = 'x-pack/plugins/reporting';
const chromiumDir = '.chromium';
const chromiumPath = (p) =>
build.resolvePathForPlatform(platform, reportingDir, chromiumDir, p);
return (platforms) => {
const paths = [];
if (platforms.windows) {
paths.push(chromiumPath('chromium-*-win32.zip'));
paths.push(chromiumPath('chromium-*-windows.zip'));
}
if (platforms.darwin) {
paths.push(chromiumPath('chromium-*-darwin.zip'));
}
if (platforms.linux) {
paths.push(chromiumPath('chromium-*-linux.zip'));
}
return paths;
};
};
for (const platform of config.getNodePlatforms()) {
const getBrowserPaths = getBrowserPathsForPlatform(platform);
if (platform.isWindows()) {
await deleteAll(getBrowserPaths({ linux: true, darwin: true }), log);
} else if (platform.isMac()) {
await deleteAll(getBrowserPaths({ linux: true, windows: true }), log);
} else if (platform.isLinux()) {
await deleteAll(getBrowserPaths({ windows: true, darwin: true }), log);
}
}
},
};
export const CleanEmptyFoldersTask = {
description: 'Cleaning all empty folders recursively',


@ -33,7 +33,7 @@ export const CreateArchivesSourcesTask = {
log.debug(
'Generic build source copied into',
platform.getName(),
platform.getNodeArch(),
'specific build directory'
);
@ -43,7 +43,7 @@ export const CreateArchivesSourcesTask = {
destination: build.resolvePathForPlatform(platform, 'node'),
});
log.debug('Node.js copied into', platform.getName(), 'specific build directory');
log.debug('Node.js copied into', platform.getNodeArch(), 'specific build directory');
})
);
},


@ -18,6 +18,7 @@
*/
export * from './bin';
export * from './build_kibana_platform_plugins';
export * from './build_packages_task';
export * from './clean_tasks';
export * from './copy_source_task';
@ -26,18 +27,18 @@ export * from './create_archives_task';
export * from './create_empty_dirs_and_files_task';
export * from './create_package_json_task';
export * from './create_readme_task';
export * from './install_chromium';
export * from './install_dependencies_task';
export * from './license_file_task';
export * from './nodejs';
export * from './nodejs_modules';
export * from './nodejs';
export * from './notice_file_task';
export * from './optimize_task';
export * from './os_packages';
export * from './patch_native_modules_task';
export * from './path_length_task';
export * from './transpile_babel_task';
export * from './transpile_scss_task';
export * from './uuid_verification_task';
export * from './verify_env_task';
export * from './write_sha_sums_task';
export * from './path_length_task';
export * from './build_kibana_platform_plugins';
export * from './uuid_verification_task';


@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// eslint-disable-next-line @kbn/eslint/no-restricted-paths
import { installBrowser } from '../../../../x-pack/plugins/reporting/server/browsers/install';
import { first } from 'rxjs/operators';
export const InstallChromiumTask = {
description: 'Installing Chromium',
async run(config, log, build) {
if (build.isOss()) {
return;
} else {
for (const platform of config.getNodePlatforms()) {
log.info(`Installing Chromium for ${platform.getName()}-${platform.getArchitecture()}`);
const { binaryPath$ } = installBrowser(
log,
build.resolvePathForPlatform(platform, 'x-pack/plugins/reporting/chromium'),
platform.getName(),
platform.getArchitecture()
);
await binaryPath$.pipe(first()).toPromise();
}
}
},
};


@ -47,7 +47,7 @@ export const CreateNoticeFileTask = {
log.info('Generating build notice');
const { extractDir: nodeDir, version: nodeVersion } = getNodeDownloadInfo(
config,
config.getLinuxPlatform()
config.getPlatform('linux', 'x64')
);
const notice = await generateBuildNoticeText({


@ -22,7 +22,7 @@ import { resolve } from 'path';
import { exec } from '../../lib';
export async function runFpm(config, log, build, type, pkgSpecificFlags) {
const linux = config.getLinuxPlatform();
const linux = config.getPlatform('linux', 'x64');
const version = config.getBuildVersion();
const resolveWithTrailingSlash = (...paths) => `${resolve(...paths)}/`;


@ -38,7 +38,7 @@ const packages = [
url: 'https://github.com/uhop/node-re2/releases/download/1.14.0/linux-x64-64.gz',
sha256: 'f54f059035e71a7ccb3fa201080e260c41d228d13a8247974b4bb157691b6757',
},
windows: {
win32: {
url: 'https://github.com/uhop/node-re2/releases/download/1.14.0/win32-x64-64.gz',
sha256: 'de708446a8b802f4634c2cfef097c2625a2811fdcd8133dfd7b7c485f966caa9',
},

x-pack/.gitignore

@ -9,7 +9,7 @@
/plugins/reporting/.chromium/
/legacy/plugins/reporting/.chromium/
/legacy/plugins/reporting/.phantom/
/plugins/reporting/.chromium/
/plugins/reporting/chromium/
/plugins/reporting/.phantom/
/.aws-config.json
/.env


@ -20,7 +20,8 @@ You'll need access to our GCP account, which is where we have two machines provi
Chromium is built via a build tool called "ninja". The build can be configured by specifying build flags either in an "args.gn" file or via commandline args. We have an "args.gn" file per platform:
- mac: darwin/args.gn
- linux: linux/args.gn
- linux 64bit: linux-x64/args.gn
- ARM 64bit: linux-aarch64/args.gn
- windows: windows/args.gn
The various build flags are not well documented. Some are documented [here](https://www.chromium.org/developers/gn-build-configuration). Some, such as `enable_basic_printing = false`, I only found by poking through 3rd party build scripts.
@ -65,15 +66,16 @@ Create the build folder:
Copy the `x-pack/build-chromium` folder to each. Replace `you@your-machine` with the correct username and VM name:
- Mac: `cp -r ~/dev/elastic/kibana/x-pack/build_chromium ~/chromium/build_chromium`
- Linux: `gcloud compute scp --recurse ~/dev/elastic/kibana/x-pack/build_chromium you@your-machine:~/chromium/build_chromium --zone=us-east1-b`
- Mac: `cp -r x-pack/build_chromium ~/chromium/build_chromium`
- Linux: `gcloud compute scp --recurse x-pack/build_chromium you@your-machine:~/chromium/ --zone=us-east1-b --project "XXXXXXXX"`
- Windows: Copy the `build_chromium` folder via the RDP GUI into `c:\chromium\build_chromium`
There is an init script for each platform. This downloads and installs the necessary prerequisites, sets environment variables, etc.
- Mac: `~/chromium/build_chromium/darwin/init.sh`
- Linux: `~/chromium/build_chromium/linux/init.sh`
- Windows `c:\chromium\build_chromium\windows\init.bat`
- Mac x64: `~/chromium/build_chromium/darwin/init.sh`
- Linux x64: `~/chromium/build_chromium/linux/init.sh`
- Linux arm64: `~/chromium/build_chromium/linux/init.sh arm64`
- Windows x64: `c:\chromium\build_chromium\windows\init.bat`
In windows, at least, you will need to do a number of extra steps:
@ -102,15 +104,16 @@ Note: In Linux, you should run the build command in tmux so that if your ssh ses
To run the build, replace the sha in the following commands with the sha that you wish to build:
- Mac: `python ~/chromium/build_chromium/build.py 312d84c8ce62810976feda0d3457108a6dfff9e6`
- Linux: `python ~/chromium/build_chromium/build.py 312d84c8ce62810976feda0d3457108a6dfff9e6`
- Windows: `python c:\chromium\build_chromium\build.py 312d84c8ce62810976feda0d3457108a6dfff9e6`
- Mac x64: `python ~/chromium/build_chromium/build.py 312d84c8ce62810976feda0d3457108a6dfff9e6`
- Linux x64: `python ~/chromium/build_chromium/build.py 312d84c8ce62810976feda0d3457108a6dfff9e6`
- Linux arm64: `python ~/chromium/build_chromium/build.py 312d84c8ce62810976feda0d3457108a6dfff9e6 arm64`
- Windows x64: `python c:\chromium\build_chromium\build.py 312d84c8ce62810976feda0d3457108a6dfff9e6`
## Artifacts
After the build completes, there will be a .zip file and a .md5 file in `~/chromium/chromium/src/out/headless`. These are named like so: `chromium-{first_7_of_SHA}-{platform}`, for example: `chromium-4747cc2-linux`.
After the build completes, there will be a .zip file and a .md5 file in `~/chromium/chromium/src/out/headless`. These are named like so: `chromium-{first_7_of_SHA}-{platform}_{arch}`, for example: `chromium-4747cc2-linux_x64`.
The zip files need to be deployed to s3. For testing, I drop them into `headless-shell-dev`, but for production, they need to be in `headless-shell`. And the `x-pack/plugins/reporting/server/browsers/chromium/paths.ts` file needs to be upated to have the correct `archiveChecksum`, `archiveFilename`, `binaryChecksum` and `baseUrl`. Below is a list of what the archive's are:
The zip files need to be deployed to GCP Storage. For testing, I drop them into `headless-shell-dev`, but for production, they need to be in `headless-shell`. And the `x-pack/plugins/reporting/server/browsers/chromium/paths.ts` file needs to be updated to have the correct `archiveChecksum`, `archiveFilename`, `binaryChecksum` and `baseUrl`. Below is a list of what those values are (an example entry is sketched after the list):
- `archiveChecksum`: The contents of the `.md5` file, which is the `md5` checksum of the zip file.
- `binaryChecksum`: The `md5` checksum of the `headless_shell` binary itself.
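
For reference, a sketch of one `packages` entry in `x-pack/plugins/reporting/server/browsers/chromium/paths.ts` after such an update; the values here are copied from the new linux arm64 entry added in this PR:

```ts
// One entry in paths.ts that must be refreshed whenever a new archive is uploaded.
const linuxArm64Package = {
  platforms: ['linux'],
  architecture: 'arm64',
  archiveFilename: 'chromium-312d84c-linux_arm64.zip',
  archiveChecksum: 'aa4d5b99dd2c1bd8e614e67f63a48652', // md5 of the .zip (contents of the .md5 file)
  binaryChecksum: '7fdccff319396f0aee7f269dd85fe6fc', // md5 of the headless_shell binary
  binaryRelativePath: 'headless_shell-linux_arm64/headless_shell',
};
```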
@ -139,8 +142,8 @@ In the case of Windows, you can use IE to open `http://localhost:9221` and see i
The following links provide helpful context about how the Chromium build works, and its prerequisites:
- https://www.chromium.org/developers/how-tos/get-the-code/working-with-release-branches
- https://chromium.googlesource.com/chromium/src/+/master/docs/windows_build_instructions.md
- https://chromium.googlesource.com/chromium/src/+/master/docs/mac_build_instructions.md
- https://chromium.googlesource.com/chromium/src/+/master/docs/linux_build_instructions.md
- https://chromium.googlesource.com/chromium/src/+/HEAD/docs/windows_build_instructions.md
- https://chromium.googlesource.com/chromium/src/+/HEAD/docs/mac_build_instructions.md
- https://chromium.googlesource.com/chromium/src/+/HEAD/docs/linux/build_instructions.md
- Some build-flag descriptions: https://www.chromium.org/developers/gn-build-configuration
- The serverless Chromium project was indispensable: https://github.com/adieuadieu/serverless-chrome/blob/b29445aa5a96d031be2edd5d1fc8651683bf262c/packages/lambda/builds/chromium/build/build.sh


@ -17,7 +17,10 @@ if (len(sys.argv) < 2):
# 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479
source_version = sys.argv[1]
print('Building Chromium ' + source_version)
# Set to "arm64" to build for ARM on Linux (defaults to x64)
arch_name = sys.argv[2] if len(sys.argv) >= 3 else 'x64'
print('Building Chromium ' + source_version + ' for ' + arch_name)
# Set the environment variables required by the build tools
print('Configuring the build environment')
@ -42,21 +45,29 @@ print('Generating platform-specific args')
print('Copying build args: ' + platform_build_args + ' to out/headless/args.gn')
mkdir('out/headless')
shutil.copyfile(platform_build_args, 'out/headless/args.gn')
print('Adding target_cpu to args')
f = open('out/headless/args.gn', 'a')
f.write('\rtarget_cpu = "' + arch_name + '"')
f.close()
runcmd('gn gen out/headless')
# Build Chromium... this takes *forever* on underpowered VMs
print('Compiling... this will take a while')
runcmd('autoninja -C out/headless headless_shell')
# Optimize the output on Linux and Mac by stripping inessentials from the binary
if platform.system() != 'Windows':
# Optimize the output on Linux x64 and Mac by stripping inessentials from the binary
# ARM must be cross-compiled from Linux, and the x64 host cannot read the ARM binary in order to strip it
if platform.system() != 'Windows' and arch_name != 'arm64':
print('Optimizing headless_shell')
shutil.move('out/headless/headless_shell', 'out/headless/headless_shell_raw')
runcmd('strip -o out/headless/headless_shell out/headless/headless_shell_raw')
# Create the zip and generate the md5 hash using filenames like:
# chromium-4747cc2-linux.zip
base_filename = 'out/headless/chromium-' + source_version[:7].strip('.') + '-' + platform.system().lower()
# chromium-4747cc2-linux_x64.zip
base_filename = 'out/headless/chromium-' + source_version[:7].strip('.') + '-' + platform.system().lower() + '_' + arch_name
zip_filename = base_filename + '.zip'
md5_filename = base_filename + '.md5'
@ -66,7 +77,7 @@ archive = zipfile.ZipFile(zip_filename, mode='w', compression=zipfile.ZIP_DEFLAT
def archive_file(name):
"""A little helper function to write individual files to the zip file"""
from_path = os.path.join('out/headless', name)
to_path = os.path.join('headless_shell-' + platform.system().lower(), name)
to_path = os.path.join('headless_shell-' + platform.system().lower() + '_' + arch_name, name)
archive.write(from_path, to_path)
# Each platform has slightly different requirements for what dependencies
@ -76,6 +87,9 @@ if platform.system() == 'Linux':
archive_file(os.path.join('swiftshader', 'libEGL.so'))
archive_file(os.path.join('swiftshader', 'libGLESv2.so'))
if arch_name == 'arm64':
archive_file(os.path.join('swiftshader', 'libEGL.so'))
elif platform.system() == 'Windows':
archive_file('headless_shell.exe')
archive_file('dbghelp.dll')


@ -1,4 +1,4 @@
import os, platform
import os, platform, sys
from build_util import runcmd, mkdir, md5_file, root_dir, configure_environment
# This is a cross-platform initialization script which should only be run
@ -29,4 +29,10 @@ runcmd('fetch chromium')
# Build Linux deps
if platform.system() == 'Linux':
os.chdir('src')
if len(sys.argv) >= 2:
sysroot_cmd = 'build/linux/sysroot_scripts/install-sysroot.py --arch=' + sys.argv[1]
print('Running `' + sysroot_cmd + '`')
runcmd(sysroot_cmd)
runcmd('build/install-build-deps.sh')


@ -10,4 +10,4 @@ fi
# Launch the cross-platform init script using a relative path
# from this script's location.
python "`dirname "$0"`/../init.py"
python "`dirname "$0"`/../init.py" $1


@ -9,13 +9,11 @@ require('../src/setup_node_env');
const { buildTask } = require('./tasks/build');
const { devTask } = require('./tasks/dev');
const { testTask, testKarmaTask, testKarmaDebugTask } = require('./tasks/test');
const { prepareTask } = require('./tasks/prepare');
// export the tasks that are runnable from the CLI
module.exports = {
build: buildTask,
dev: devTask,
prepare: prepareTask,
test: testTask,
'test:karma': testKarmaTask,
'test:karma:debug': testKarmaDebugTask,


@ -55,10 +55,6 @@ export const args = ({ userDataDir, viewport, disableSandbox, proxy: proxyConfig
flags.push('--no-sandbox');
}
// log to chrome_debug.log
flags.push('--enable-logging');
flags.push('--v=1');
if (process.platform === 'linux') {
flags.push('--disable-setuid-sandbox');
}


@ -24,7 +24,6 @@ import { CaptureConfig } from '../../../../server/types';
import { LevelLogger } from '../../../lib';
import { safeChildProcess } from '../../safe_child_process';
import { HeadlessChromiumDriver } from '../driver';
import { getChromeLogLocation } from '../paths';
import { puppeteerLaunch } from '../puppeteer';
import { args } from './args';
@ -77,7 +76,6 @@ export class HeadlessChromiumDriverFactory {
`The Reporting plugin encountered issues launching Chromium in a self-test. You may have trouble generating reports.`
);
logger.error(error);
logger.warning(`See Chromium's log output at "${getChromeLogLocation(this.binaryPath)}"`);
return null;
});
}


@ -7,11 +7,12 @@
import path from 'path';
export const paths = {
archivesPath: path.resolve(__dirname, '../../../.chromium'),
archivesPath: path.resolve(__dirname, '../../../../../../.chromium'),
baseUrl: 'https://storage.googleapis.com/headless_shell/',
packages: [
{
platforms: ['darwin', 'freebsd', 'openbsd'],
architecture: 'x64',
archiveFilename: 'chromium-312d84c-darwin.zip',
archiveChecksum: '020303e829745fd332ae9b39442ce570',
binaryChecksum: '5cdec11d45a0eddf782bed9b9f10319f',
@ -19,13 +20,23 @@ export const paths = {
},
{
platforms: ['linux'],
architecture: 'x64',
archiveFilename: 'chromium-312d84c-linux.zip',
archiveChecksum: '15ba9166a42f93ee92e42217b737018d',
binaryChecksum: 'c7fe36ed3e86a6dd23323be0a4e8c0fd',
binaryRelativePath: 'headless_shell-linux/headless_shell',
},
{
platforms: ['linux'],
architecture: 'arm64',
archiveFilename: 'chromium-312d84c-linux_arm64.zip',
archiveChecksum: 'aa4d5b99dd2c1bd8e614e67f63a48652',
binaryChecksum: '7fdccff319396f0aee7f269dd85fe6fc',
binaryRelativePath: 'headless_shell-linux_arm64/headless_shell',
},
{
platforms: ['win32'],
architecture: 'x64',
archiveFilename: 'chromium-312d84c-windows.zip',
archiveChecksum: '3e36adfb755dacacc226ed5fd6b43105',
binaryChecksum: '9913e431fbfc7dfcd958db74ace4d58b',
@ -33,6 +44,3 @@ export const paths = {
},
],
};
export const getChromeLogLocation = (binaryPath: string) =>
path.join(binaryPath, '..', 'chrome_debug.log');


@ -29,7 +29,7 @@ export async function clean(dir: string, expectedPaths: string[], logger: LevelL
await asyncMap(filenames, async (filename) => {
const path = resolvePath(dir, filename);
if (!expectedPaths.includes(path)) {
logger.warn(`Deleting unexpected file ${path}`);
logger.warning(`Deleting unexpected file ${path}`);
await del(path, { force: true });
}
});


@ -7,7 +7,6 @@
import { existsSync } from 'fs';
import { resolve as resolvePath } from 'path';
import { BrowserDownload, chromium } from '../';
import { BROWSER_TYPE } from '../../../common/constants';
import { LevelLogger } from '../../lib';
import { md5 } from './checksum';
import { clean } from './clean';
@ -17,19 +16,9 @@ import { asyncMap } from './util';
/**
* Check for the downloaded archive of each requested browser type and
* download them if they are missing or their checksum is invalid
* @param {String} browserType
* @return {Promise<undefined>}
*/
export async function ensureBrowserDownloaded(browserType = BROWSER_TYPE, logger: LevelLogger) {
await ensureDownloaded([chromium], logger);
}
/**
* Check for the downloaded archive of each requested browser type and
* download them if they are missing or their checksum is invalid*
* @return {Promise<undefined>}
*/
export async function ensureAllBrowsersDownloaded(logger: LevelLogger) {
export async function ensureBrowserDownloaded(logger: LevelLogger) {
await ensureDownloaded([chromium], logger);
}


@ -4,4 +4,4 @@
* you may not use this file except in compliance with the Elastic License.
*/
export { ensureBrowserDownloaded, ensureAllBrowsersDownloaded } from './ensure_downloaded';
export { ensureBrowserDownloaded } from './ensure_downloaded';


@ -12,7 +12,6 @@ import { HeadlessChromiumDriverFactory } from './chromium/driver_factory';
import { installBrowser } from './install';
import { ReportingConfig } from '..';
export { ensureAllBrowsersDownloaded } from './download';
export { HeadlessChromiumDriver } from './chromium/driver';
export { HeadlessChromiumDriverFactory } from './chromium/driver_factory';
export { chromium } from './chromium';
@ -42,7 +41,7 @@ export const initializeBrowserDriverFactory = async (
config: ReportingConfig,
logger: LevelLogger
) => {
const { binaryPath$ } = installBrowser(chromium, config, logger);
const { binaryPath$ } = installBrowser(logger);
const binaryPath = await binaryPath$.pipe(first()).toPromise();
const captureConfig = config.get('capture');
return chromium.createDriverFactory(binaryPath, captureConfig, logger);


@ -4,24 +4,22 @@
* you may not use this file except in compliance with the Elastic License.
*/
import fs from 'fs';
import os from 'os';
import path from 'path';
import del from 'del';
import * as Rx from 'rxjs';
import { first } from 'rxjs/operators';
import { promisify } from 'util';
import { ReportingConfig } from '../';
import { LevelLogger } from '../lib';
import { BrowserDownload } from './';
import { ensureBrowserDownloaded } from './download';
// @ts-ignore
import { md5 } from './download/checksum';
// @ts-ignore
import { extract } from './extract';
const chmod = promisify(fs.chmod);
import { paths } from './chromium/paths';
interface Package {
platforms: string[];
architecture: string;
}
/**
@ -29,44 +27,33 @@ interface Package {
* archive. If there is an error extracting the archive an `ExtractError` is thrown
*/
export function installBrowser(
browser: BrowserDownload,
config: ReportingConfig,
logger: LevelLogger
logger: LevelLogger,
chromiumPath: string = path.resolve(__dirname, '../../chromium'),
platform: string = process.platform,
architecture: string = os.arch()
): { binaryPath$: Rx.Subject<string> } {
const binaryPath$ = new Rx.Subject<string>();
const backgroundInstall = async () => {
const captureConfig = config.get('capture');
const { autoDownload, type: browserType } = captureConfig.browser;
if (autoDownload) {
await ensureBrowserDownloaded(browserType, logger);
}
const pkg = paths.packages.find((p: Package) => {
return p.platforms.includes(platform) && p.architecture === architecture;
});
const pkg = browser.paths.packages.find((p: Package) => p.platforms.includes(process.platform));
if (!pkg) {
throw new Error(`Unsupported platform: ${JSON.stringify(browser, null, 2)}`);
// TODO: validate this
throw new Error(`Unsupported platform: ${platform}-${architecture}`);
}
const dataDir = await config.kbnConfig.get('path', 'data').pipe(first()).toPromise();
const binaryPath = path.join(dataDir, pkg.binaryRelativePath);
const binaryPath = path.join(chromiumPath, pkg.binaryRelativePath);
const binaryChecksum = await md5(binaryPath).catch(() => '');
try {
const binaryChecksum = await md5(binaryPath).catch(() => '');
if (binaryChecksum !== pkg.binaryChecksum) {
await ensureBrowserDownloaded(logger);
if (binaryChecksum !== pkg.binaryChecksum) {
const archive = path.join(browser.paths.archivesPath, pkg.archiveFilename);
logger.info(`Extracting [${archive}] to [${binaryPath}]`);
await extract(archive, dataDir);
await chmod(binaryPath, '755');
}
} catch (error) {
if (error.cause && ['EACCES', 'EEXIST'].includes(error.cause.code)) {
logger.error(
`Error code ${error.cause.code}: Insufficient permissions for extracting the browser archive. ` +
`Make sure the Kibana data directory (path.data) is owned by the same user that is running Kibana.`
);
}
const archive = path.join(paths.archivesPath, pkg.archiveFilename);
logger.info(`Extracting [${archive}] to [${binaryPath}]`);
throw error; // reject the promise with the original error
await del(chromiumPath);
await extract(archive, chromiumPath);
}
logger.debug(`Browser executable: ${binaryPath}`);


@ -16,7 +16,6 @@ import fancyLog from 'fancy-log';
import chalk from 'chalk';
import { generateNoticeFromSource } from '../../src/dev/notice';
import { prepareTask } from './prepare';
import { gitInfo } from './helpers/git_info';
import { PKG_NAME } from './helpers/pkg';
import { BUILD_VERSION } from './helpers/build_version';
@ -78,7 +77,6 @@ async function generateNoticeText() {
export const buildTask = gulp.series(
cleanBuildTask,
reportTask,
prepareTask,
buildCanvasShareableRuntime,
pluginHelpersBuild,
generateNoticeText


@ -7,9 +7,7 @@
import * as pluginHelpers from '@kbn/plugin-helpers';
import gulp from 'gulp';
import { prepareTask } from './prepare';
export const devTask = gulp.series(prepareTask, async function startKibanaServer() {
export const devTask = gulp.series(async function startKibanaServer() {
await pluginHelpers.run('start', {
flags: process.argv.slice(3),
});


@ -1,25 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ensureAllBrowsersDownloaded } from '../plugins/reporting/server/browsers';
import { LevelLogger } from '../plugins/reporting/server/lib';
export const prepareTask = async () => {
// eslint-disable-next-line no-console
const consoleLogger = (tag: string) => (message: unknown) => console.log(tag, message);
const innerLogger = {
get: () => innerLogger,
debug: consoleLogger('debug'),
info: consoleLogger('info'),
warn: consoleLogger('warn'),
trace: consoleLogger('trace'),
error: consoleLogger('error'),
fatal: consoleLogger('fatal'),
log: consoleLogger('log'),
};
const levelLogger = new LevelLogger(innerLogger);
await ensureAllBrowsersDownloaded(levelLogger);
};