Mirror of https://github.com/elastic/kibana.git, synced 2025-04-24 09:48:58 -04:00
[Reporting] Move code out of Legacy (#67904)
* [Reporting] Move code out of Legacy
* Elasticsearch is not a plugin dep
* add data as plugin dependo
* diff cleanup 1
* log the browser download
* Update paths in outside code for new Reporting home
* fix download test
* add numeral typing for x-pack/test
* Fix jest tests for np migration
* Shorten import paths
* remove this file, add typings to the node module
* remove local typing that has been provided by node module
* Add optional security plugin dep
* revert conflicting apm typings removal
* fix i18n
* fix snakecase whitelist

Co-authored-by: Joel Griffith <joel.griffith@elastic.co>
Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
This commit is contained in: parent 0b8e0c054f, commit 2f67cbfa9c

226 changed files with 533 additions and 664 deletions
@@ -40,7 +40,7 @@ echo "Creating bootstrap_cache archive"
 # archive cacheable directories
 mkdir -p "$HOME/.kibana/bootstrap_cache"
 tar -cf "$HOME/.kibana/bootstrap_cache/$branch.tar" \
-  x-pack/legacy/plugins/reporting/.chromium \
+  x-pack/plugins/reporting/.chromium \
   .es \
   .chromedriver \
   .geckodriver;
@@ -261,7 +261,7 @@ export class ClusterManager {
       /debug\.log$/,
       ...pluginInternalDirsIgnore,
       fromRoot('src/legacy/server/sass/__tmp__'),
-      fromRoot('x-pack/legacy/plugins/reporting/.chromium'),
+      fromRoot('x-pack/plugins/reporting/.chromium'),
       fromRoot('x-pack/plugins/siem/cypress'),
       fromRoot('x-pack/plugins/apm/e2e'),
       fromRoot('x-pack/plugins/apm/scripts'),
@@ -206,7 +206,7 @@ export const CleanExtraBrowsersTask = {

   async run(config, log, build) {
     const getBrowserPathsForPlatform = (platform) => {
-      const reportingDir = 'x-pack/legacy/plugins/reporting';
+      const reportingDir = 'x-pack/plugins/reporting';
       const chromiumDir = '.chromium';
       const chromiumPath = (p) =>
         build.resolvePathForPlatform(platform, reportingDir, chromiumDir, p);
@@ -35,13 +35,13 @@ describe(`Transform fn`, () => {
   it(`should remove the jenkins workspace path`, () => {
     const obj = {
       staticSiteUrl:
-        '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/legacy/plugins/reporting/server/browsers/extract/unzip.js',
+        '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
       COVERAGE_INGESTION_KIBANA_ROOT:
         '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana',
     };
     expect(coveredFilePath(obj)).to.have.property(
       'coveredFilePath',
-      'x-pack/legacy/plugins/reporting/server/browsers/extract/unzip.js'
+      'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
     );
   });
 });
@@ -1,5 +1,5 @@
 {
-  "/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/legacy/plugins/reporting/server/browsers/extract/unzip.js": {
+  "/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js": {
     "lines": {
       "total": 4,
       "covered": 4,
@@ -173,12 +173,12 @@ export const TEMPORARILY_IGNORED_PATHS = [
   'x-pack/plugins/monitoring/public/icons/health-green.svg',
   'x-pack/plugins/monitoring/public/icons/health-red.svg',
   'x-pack/plugins/monitoring/public/icons/health-yellow.svg',
-  'x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/pdf/assets/fonts/noto/NotoSansCJKtc-Medium.ttf',
-  'x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/pdf/assets/fonts/noto/NotoSansCJKtc-Regular.ttf',
-  'x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/pdf/assets/fonts/roboto/Roboto-Italic.ttf',
-  'x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/pdf/assets/fonts/roboto/Roboto-Medium.ttf',
-  'x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/pdf/assets/fonts/roboto/Roboto-Regular.ttf',
-  'x-pack/legacy/plugins/reporting/export_types/printable_pdf/server/lib/pdf/assets/img/logo-grey.png',
+  'x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/fonts/noto/NotoSansCJKtc-Medium.ttf',
+  'x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/fonts/noto/NotoSansCJKtc-Regular.ttf',
+  'x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/fonts/roboto/Roboto-Italic.ttf',
+  'x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/fonts/roboto/Roboto-Medium.ttf',
+  'x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/fonts/roboto/Roboto-Regular.ttf',
+  'x-pack/plugins/reporting/server/export_types/printable_pdf/server/lib/pdf/assets/img/logo-grey.png',
   'x-pack/test/functional/es_archives/monitoring/beats-with-restarted-instance/data.json.gz',
   'x-pack/test/functional/es_archives/monitoring/beats-with-restarted-instance/mappings.json',
   'x-pack/test/functional/es_archives/monitoring/logstash-pipelines/data.json.gz',
x-pack/.gitignore (vendored, 4 changes)
@@ -5,8 +5,8 @@
 /test/functional/screenshots
 /test/functional/apps/reporting/reports/session
 /test/reporting/configs/failure_debug/
-/legacy/plugins/reporting/.chromium/
-/legacy/plugins/reporting/.phantom/
+/plugins/reporting/.chromium/
+/plugins/reporting/.phantom/
 /.aws-config.json
 /.env
 /.kibana-plugin-helpers.dev.*
@@ -35,7 +35,7 @@
   "xpack.monitoring": ["plugins/monitoring", "legacy/plugins/monitoring"],
   "xpack.remoteClusters": "plugins/remote_clusters",
   "xpack.painlessLab": "plugins/painless_lab",
-  "xpack.reporting": ["plugins/reporting", "legacy/plugins/reporting"],
+  "xpack.reporting": ["plugins/reporting"],
   "xpack.rollupJobs": ["legacy/plugins/rollup", "plugins/rollup"],
   "xpack.searchProfiler": "plugins/searchprofiler",
   "xpack.security": ["legacy/plugins/security", "plugins/security"],
@@ -13,8 +13,8 @@
   "index.js",
   ".i18nrc.json",
   "plugins/**/*",
-  "legacy/plugins/reporting/.phantom/*",
-  "legacy/plugins/reporting/.chromium/*",
+  "plugins/reporting/.phantom/*",
+  "plugins/reporting/.chromium/*",
   "legacy/common/**/*",
   "legacy/plugins/**/*",
   "legacy/server/**/*",
@@ -110,7 +110,7 @@ To run the build, replace the sha in the following commands with the sha that you

 After the build completes, there will be a .zip file and a .md5 file in `~/chromium/chromium/src/out/headless`. These are named like so: `chromium-{first_7_of_SHA}-{platform}`, for example: `chromium-4747cc2-linux`.

-The zip files need to be deployed to s3. For testing, I drop them into `headless-shell-dev`, but for production, they need to be in `headless-shell`. And the `x-pack/legacy/plugins/reporting/server/browsers/chromium/paths.js` file needs to be upated to have the correct `archiveChecksum`, `archiveFilename`, `binaryChecksum` and `baseUrl`. Below is a list of what the archive's are:
+The zip files need to be deployed to s3. For testing, I drop them into `headless-shell-dev`, but for production, they need to be in `headless-shell`. And the `x-pack/plugins/reporting/server/browsers/chromium/paths.ts` file needs to be upated to have the correct `archiveChecksum`, `archiveFilename`, `binaryChecksum` and `baseUrl`. Below is a list of what the archive's are:

 - `archiveChecksum`: The contents of the `.md5` file, which is the `md5` checksum of the zip file.
 - `binaryChecksum`: The `md5` checksum of the `headless_shell` binary itself.
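(Editor's aside: the `archiveChecksum` described in the hunk above is just the hex MD5 digest of the zip. A minimal TypeScript sketch of computing it — the archive filename below is only an example taken from the naming scheme described above, not from this diff:)

    import { createHash } from 'crypto';
    import { createReadStream } from 'fs';

    // Stream the archive through an MD5 hash so large zips never sit in memory.
    function md5Of(path: string): Promise<string> {
      return new Promise((resolve, reject) => {
        const hash = createHash('md5');
        createReadStream(path)
          .on('data', (chunk) => hash.update(chunk))
          .on('error', reject)
          .on('end', () => resolve(hash.digest('hex')));
      });
    }

    // Example: md5Of('chromium-4747cc2-linux.zip').then((sum) => console.log(sum));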
@@ -6,7 +6,6 @@

 import { xpackMain } from './legacy/plugins/xpack_main';
 import { monitoring } from './legacy/plugins/monitoring';
-import { reporting } from './legacy/plugins/reporting';
 import { security } from './legacy/plugins/security';
 import { dashboardMode } from './legacy/plugins/dashboard_mode';
 import { beats } from './legacy/plugins/beats_management';
@@ -18,7 +17,6 @@ module.exports = function (kibana) {
   return [
     xpackMain(kibana),
     monitoring(kibana),
-    reporting(kibana),
     spaces(kibana),
     security(kibana),
     dashboardMode(kibana),
@@ -1,49 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { i18n } from '@kbn/i18n';
-import { Legacy } from 'kibana';
-import { resolve } from 'path';
-import { PLUGIN_ID, UI_SETTINGS_CUSTOM_PDF_LOGO } from './common/constants';
-import { legacyInit } from './server/legacy';
-
-export type ReportingPluginSpecOptions = Legacy.PluginSpecOptions;
-
-const kbToBase64Length = (kb: number) => Math.floor((kb * 1024 * 8) / 6);
-
-export const reporting = (kibana: any) => {
-  return new kibana.Plugin({
-    id: PLUGIN_ID,
-    publicDir: resolve(__dirname, 'public'),
-    require: ['kibana', 'elasticsearch', 'xpack_main'],
-
-    uiExports: {
-      uiSettingDefaults: {
-        [UI_SETTINGS_CUSTOM_PDF_LOGO]: {
-          name: i18n.translate('xpack.reporting.pdfFooterImageLabel', {
-            defaultMessage: 'PDF footer image',
-          }),
-          value: null,
-          description: i18n.translate('xpack.reporting.pdfFooterImageDescription', {
-            defaultMessage: `Custom image to use in the PDF's footer`,
-          }),
-          type: 'image',
-          validation: {
-            maxSize: {
-              length: kbToBase64Length(200),
-              description: '200 kB',
-            },
-          },
-          category: [PLUGIN_ID],
-        },
-      },
-    },
-
-    async init(server: Legacy.Server) {
-      return legacyInit(server, this);
-    },
-  } as ReportingPluginSpecOptions);
-};
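(Editor's aside: the `kbToBase64Length` helper in the file deleted above converts a kilobyte budget into a base64 character budget. Base64 encodes 6 bits per character, so `kb` kilobytes is `kb * 1024 * 8` bits, or `floor(kb * 1024 * 8 / 6)` characters. A quick check of the 200 kB logo limit:)

    const kbToBase64Length = (kb: number) => Math.floor((kb * 1024 * 8) / 6);
    // 200 kB of raw image data fits in at most 273066 base64 characters.
    console.log(kbToBase64Length(200)); // 273066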
@@ -1,17 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { PluginInitializerContext } from 'src/core/server';
-import { ReportingConfig } from './config';
-import { ReportingCore } from './core';
-import { ReportingPlugin as Plugin } from './plugin';
-
-export const plugin = (context: PluginInitializerContext, config: ReportingConfig) => {
-  return new Plugin(context, config);
-};
-
-export { ReportingPlugin } from './plugin';
-export { ReportingConfig, ReportingCore };
@@ -1,61 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { Legacy } from 'kibana';
-import { take } from 'rxjs/operators';
-import { PluginInitializerContext } from 'src/core/server';
-import { LicensingPluginSetup } from '../../../../plugins/licensing/server';
-import { ReportingPluginSpecOptions } from '../';
-import { PluginsSetup } from '../../../../plugins/reporting/server';
-import { SecurityPluginSetup } from '../../../../plugins/security/server';
-import { buildConfig } from './config';
-import { plugin } from './index';
-import { LegacySetup, ReportingStartDeps } from './types';
-
-const buildLegacyDependencies = (
-  server: Legacy.Server,
-  reportingPlugin: ReportingPluginSpecOptions
-): LegacySetup => ({
-  route: server.route.bind(server),
-  plugins: {
-    xpack_main: server.plugins.xpack_main,
-    reporting: reportingPlugin,
-  },
-});
-
-/*
- * Starts the New Platform instance of Reporting using legacy dependencies
- */
-export const legacyInit = async (
-  server: Legacy.Server,
-  reportingLegacyPlugin: ReportingPluginSpecOptions
-) => {
-  const { core: coreSetup } = server.newPlatform.setup;
-  const { config$ } = (server.newPlatform.setup.plugins.reporting as PluginsSetup).__legacy;
-  const reportingConfig = await config$.pipe(take(1)).toPromise();
-  const __LEGACY = buildLegacyDependencies(server, reportingLegacyPlugin);
-
-  const pluginInstance = plugin(
-    server.newPlatform.coreContext as PluginInitializerContext,
-    buildConfig(coreSetup, server, reportingConfig)
-  );
-
-  await pluginInstance.setup(coreSetup, {
-    elasticsearch: coreSetup.elasticsearch,
-    licensing: server.newPlatform.setup.plugins.licensing as LicensingPluginSetup,
-    security: server.newPlatform.setup.plugins.security as SecurityPluginSetup,
-    usageCollection: server.newPlatform.setup.plugins.usageCollection,
-    __LEGACY,
-  });
-
-  // Schedule to call the "start" hook only after start dependencies are ready
-  coreSetup.getStartServices().then(([core, plugins]) =>
-    pluginInstance.start(core, {
-      data: (plugins as ReportingStartDeps).data,
-      __LEGACY,
-    })
-  );
-};
@@ -1,11 +0,0 @@
-import events from 'events';
-
-export class QueueMock extends events.EventEmitter {
-  constructor() {
-    super();
-  }
-
-  setClient(client) {
-    this.client = client;
-  }
-}
@@ -1,81 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-
-import { CoreSetup, CoreStart, Plugin, PluginInitializerContext } from 'src/core/server';
-import { createBrowserDriverFactory } from './browsers';
-import { ReportingConfig } from './config';
-import { ReportingCore } from './core';
-import { registerRoutes } from './routes';
-import { createQueueFactory, enqueueJobFactory, LevelLogger, runValidations } from './lib';
-import { setFieldFormats } from './services';
-import { ReportingSetup, ReportingSetupDeps, ReportingStart, ReportingStartDeps } from './types';
-import { registerReportingUsageCollector } from './usage';
-// @ts-ignore no module definition
-import { mirrorPluginStatus } from '../../../server/lib/mirror_plugin_status';
-
-export class ReportingPlugin
-  implements Plugin<ReportingSetup, ReportingStart, ReportingSetupDeps, ReportingStartDeps> {
-  private config: ReportingConfig;
-  private logger: LevelLogger;
-  private reportingCore: ReportingCore;
-
-  constructor(context: PluginInitializerContext, config: ReportingConfig) {
-    this.config = config;
-    this.logger = new LevelLogger(context.logger.get('reporting'));
-    this.reportingCore = new ReportingCore(this.config);
-  }
-
-  public async setup(core: CoreSetup, plugins: ReportingSetupDeps) {
-    const { config } = this;
-    const { elasticsearch, __LEGACY, licensing, security } = plugins;
-    const router = core.http.createRouter();
-    const basePath = core.http.basePath.get;
-    const { xpack_main: xpackMainLegacy, reporting: reportingLegacy } = __LEGACY.plugins;
-
-    // legacy plugin status
-    mirrorPluginStatus(xpackMainLegacy, reportingLegacy);
-
-    const browserDriverFactory = await createBrowserDriverFactory(config, this.logger);
-    const deps = {
-      browserDriverFactory,
-      elasticsearch,
-      licensing,
-      basePath,
-      router,
-      security,
-    };
-
-    runValidations(config, elasticsearch, browserDriverFactory, this.logger);
-
-    this.reportingCore.pluginSetup(deps);
-    registerReportingUsageCollector(this.reportingCore, plugins);
-    registerRoutes(this.reportingCore, this.logger);
-
-    return {};
-  }
-
-  public async start(core: CoreStart, plugins: ReportingStartDeps) {
-    const { reportingCore, logger } = this;
-
-    const esqueue = await createQueueFactory(reportingCore, logger);
-    const enqueueJob = enqueueJobFactory(reportingCore, logger);
-
-    this.reportingCore.pluginStart({
-      savedObjects: core.savedObjects,
-      uiSettings: core.uiSettings,
-      esqueue,
-      enqueueJob,
-    });
-
-    setFieldFormats(plugins.data.fieldFormats);
-
-    return {};
-  }
-
-  public getReportingCore() {
-    return this.reportingCore;
-  }
-}
@@ -64,7 +64,7 @@ export class GisMap extends Component {
   // - If it's not, then reporting injects a listener into the browser for a custom "renderComplete" event.
   // - When that event is fired, we snapshot the viz and move on.
   // Failure to not have the dom attribute, or custom event, will timeout the job.
-  // See x-pack/legacy/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts for more.
+  // See x-pack/plugins/reporting/export_types/common/lib/screenshots/wait_for_render.ts for more.
   _onInitialLoadRenderComplete = () => {
     const el = document.querySelector(`[data-dom-id="${this.state.domId}"]`);

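(Editor's aside: the readiness protocol that comment describes boils down to a visualization either setting a DOM attribute or firing a custom event that reporting listens for. A minimal sketch of the event-firing side — the element and attribute names here are assumptions for illustration, not taken from this diff:)

    // Mark a shared item as fully rendered and notify any reporting listener.
    function signalRenderComplete(el: HTMLElement): void {
      el.setAttribute('data-render-complete', 'true');
      el.dispatchEvent(new CustomEvent('renderComplete'));
    }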
@@ -5,3 +5,4 @@
  */

 export { CancellationToken } from './cancellation_token';
+export { Poller } from './poller';
@@ -5,7 +5,7 @@
  */

 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
-export { ConfigType } from '../server/config';
+export { ReportingConfigType } from '../server/config';

 export type JobId = string;
 export type JobStatus =
|
@ -3,18 +3,18 @@
|
|||
"version": "8.0.0",
|
||||
"kibanaVersion": "kibana",
|
||||
"optionalPlugins": [
|
||||
"security",
|
||||
"usageCollection"
|
||||
],
|
||||
"configPath": ["xpack", "reporting"],
|
||||
"requiredPlugins": [
|
||||
"data",
|
||||
"home",
|
||||
"management",
|
||||
"licensing",
|
||||
"uiActions",
|
||||
"embeddable",
|
||||
"share",
|
||||
"kibanaLegacy",
|
||||
"licensing"
|
||||
"share"
|
||||
],
|
||||
"server": true,
|
||||
"ui": true
|
||||
|
|
|
@@ -26,7 +26,7 @@ import {
 import { ManagementSectionId, ManagementSetup } from '../../../../src/plugins/management/public';
 import { SharePluginSetup } from '../../../../src/plugins/share/public';
 import { LicensingPluginSetup } from '../../licensing/public';
-import { ConfigType, JobId, JobStatusBuckets } from '../common/types';
+import { ReportingConfigType, JobId, JobStatusBuckets } from '../common/types';
 import { JOB_COMPLETION_NOTIFICATIONS_SESSION_KEY } from '../constants';
 import { getGeneralErrorToast } from './components';
 import { ReportListing } from './components/report_listing';
@@ -37,7 +37,7 @@ import { csvReportingProvider } from './share_context_menu/register_csv_reportin
 import { reportingPDFPNGProvider } from './share_context_menu/register_pdf_png_reporting';

 export interface ClientConfigType {
-  poll: ConfigType['poll'];
+  poll: ReportingConfigType['poll'];
 }

 function getStored(): JobId[] {
@@ -9,7 +9,7 @@ import { map, trunc } from 'lodash';
 import open from 'opn';
 import { ElementHandle, EvaluateFn, Page, Response, SerializableOrJSHandle } from 'puppeteer';
 import { parse as parseUrl } from 'url';
-import { ViewZoomWidthHeight } from '../../../../export_types/common/layouts/layout';
+import { ViewZoomWidthHeight } from '../../../export_types/common/layouts/layout';
 import { LevelLogger } from '../../../lib';
 import { ConditionalHeaders, ElementPosition } from '../../../types';
 import { allowRequest, NetworkPolicy } from '../../network_policy';
@@ -4,6 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

+import { first } from 'rxjs/operators';
 import { ReportingConfig } from '../';
 import { LevelLogger } from '../lib';
 import { HeadlessChromiumDriverFactory } from './chromium/driver_factory';
@@ -19,13 +20,13 @@ export async function createBrowserDriverFactory(
   const browserConfig = captureConfig.browser.chromium;
   const browserAutoDownload = captureConfig.browser.autoDownload;
   const browserType = captureConfig.browser.type;
-  const dataDir = config.kbnConfig.get('path', 'data');
+  const dataDir = await config.kbnConfig.get('path', 'data').pipe(first()).toPromise();

   if (browserConfig.disableSandbox) {
     logger.warning(`Enabling the Chromium sandbox provides an additional layer of protection.`);
   }
   if (browserAutoDownload) {
-    await ensureBrowserDownloaded(browserType);
+    await ensureBrowserDownloaded(browserType, logger);
   }

   try {
@@ -4,17 +4,16 @@
  * you may not use this file except in compliance with the Elastic License.
  */

+import del from 'del';
 import { readdirSync } from 'fs';
 import { resolve as resolvePath } from 'path';
-
-import del from 'del';
-
-import { log, asyncMap } from './util';
+import { LevelLogger } from '../../lib';
+import { asyncMap } from './util';

 /**
  * Delete any file in the `dir` that is not in the expectedPaths
  */
-export async function clean(dir: string, expectedPaths: string[]) {
+export async function clean(dir: string, expectedPaths: string[], logger: LevelLogger) {
   let filenames: string[];
   try {
     filenames = await readdirSync(dir);
@@ -30,7 +29,7 @@ export async function clean(dir: string, expectedPaths: string[]) {
   await asyncMap(filenames, async (filename) => {
     const path = resolvePath(dir, filename);
     if (!expectedPaths.includes(path)) {
-      log(`Deleting unexpected file ${path}`);
+      logger.warn(`Deleting unexpected file ${path}`);
       await del(path, { force: true });
     }
   });
@@ -5,11 +5,11 @@
  */

 import { createHash } from 'crypto';
-import { resolve as resolvePath } from 'path';
-import { readFileSync } from 'fs';
-import { Readable } from 'stream';
-
 import del from 'del';
+import { readFileSync } from 'fs';
+import { resolve as resolvePath } from 'path';
+import { Readable } from 'stream';
+import { LevelLogger } from '../../lib';
 import { download } from './download';

 const TEMP_DIR = resolvePath(__dirname, '__tmp__');
@@ -29,6 +29,12 @@ class ReadableOf extends Readable {
 jest.mock('axios');
 const request: jest.Mock = jest.requireMock('axios').request;

+const mockLogger = ({
+  error: jest.fn(),
+  warn: jest.fn(),
+  info: jest.fn(),
+} as unknown) as LevelLogger;
+
 test('downloads the url to the path', async () => {
   const BODY = 'abdcefg';
   request.mockImplementationOnce(async () => {
@@ -37,7 +43,7 @@ test('downloads the url to the path', async () => {
     };
   });

-  await download('url', TEMP_FILE);
+  await download('url', TEMP_FILE, mockLogger);
   expect(readFileSync(TEMP_FILE, 'utf8')).toEqual(BODY);
 });

@@ -50,7 +56,7 @@ test('returns the md5 hex hash of the http body', async () => {
     };
   });

-  const returned = await download('url', TEMP_FILE);
+  const returned = await download('url', TEMP_FILE, mockLogger);
   expect(returned).toEqual(HASH);
 });

@@ -59,7 +65,7 @@ test('throws if request emits an error', async () => {
     throw new Error('foo');
   });

-  return expect(download('url', TEMP_FILE)).rejects.toThrow('foo');
+  return expect(download('url', TEMP_FILE, mockLogger)).rejects.toThrow('foo');
 });

 afterEach(async () => await del(TEMP_DIR));
@@ -4,13 +4,11 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { openSync, writeSync, closeSync, mkdirSync } from 'fs';
-import { createHash } from 'crypto';
-import { dirname } from 'path';
-
 import Axios from 'axios';
-
-import { log } from './util';
+import { createHash } from 'crypto';
+import { closeSync, mkdirSync, openSync, writeSync } from 'fs';
+import { dirname } from 'path';
+import { LevelLogger } from '../../lib';

 /**
  * Download a url and calculate it's checksum
@@ -18,8 +16,8 @@ import { log } from './util';
  * @param {String} path
  * @return {Promise<String>} checksum of the downloaded file
  */
-export async function download(url: string, path: string) {
-  log(`Downloading ${url}`);
+export async function download(url: string, path: string, logger: LevelLogger) {
+  logger.info(`Downloading ${url} to ${path}`);

   const hash = createHash('md5');

@@ -39,7 +37,15 @@ export async function download(url: string, path: string) {
   });

   await new Promise((resolve, reject) => {
-    resp.data.on('error', reject).on('end', resolve);
+    resp.data
+      .on('error', (err: Error) => {
+        logger.error(err);
+        reject(err);
+      })
+      .on('end', () => {
+        logger.info(`Downloaded ${url}`);
+        resolve();
+      });
   });
 } finally {
   closeSync(handle);
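(Editor's aside: a usage sketch of the new `download` signature, assuming the names in the hunk above — this is essentially what ensure_downloaded.ts does in the next file:)

    // Inside an async function: download an archive, then verify the
    // checksum that was computed from the response stream.
    const checksum = await download(url, archivePath, logger);
    if (checksum !== expectedChecksum) {
      throw new Error(`Checksum mismatch for ${url}`);
    }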
@@ -8,6 +8,7 @@ import { existsSync } from 'fs';
 import { resolve as resolvePath } from 'path';
 import { BrowserDownload, chromium } from '../';
 import { BROWSER_TYPE } from '../../../common/constants';
+import { LevelLogger } from '../../lib';
 import { md5 } from './checksum';
 import { clean } from './clean';
 import { download } from './download';
@@ -19,16 +20,17 @@ import { asyncMap } from './util';
  * @param {String} browserType
  * @return {Promise<undefined>}
  */
-export async function ensureBrowserDownloaded(browserType = BROWSER_TYPE) {
-  await ensureDownloaded([chromium]);
+export async function ensureBrowserDownloaded(browserType = BROWSER_TYPE, logger: LevelLogger) {
+  await ensureDownloaded([chromium], logger);
 }

 /**
- * Like ensureBrowserDownloaded(), except it applies to all browsers
+ * Check for the downloaded archive of each requested browser type and
+ * download them if they are missing or their checksum is invalid*
  * @return {Promise<undefined>}
  */
-export async function ensureAllBrowsersDownloaded() {
-  await ensureDownloaded([chromium]);
+export async function ensureAllBrowsersDownloaded(logger: LevelLogger) {
+  await ensureDownloaded([chromium], logger);
 }

 /**
@@ -38,13 +40,14 @@ export async function ensureAllBrowsersDownloaded() {
  * @param {BrowserSpec} browsers
  * @return {Promise<undefined>}
  */
-async function ensureDownloaded(browsers: BrowserDownload[]) {
+async function ensureDownloaded(browsers: BrowserDownload[], logger: LevelLogger) {
   await asyncMap(browsers, async (browser) => {
     const { archivesPath } = browser.paths;

     await clean(
       archivesPath,
-      browser.paths.packages.map((p) => resolvePath(archivesPath, p.archiveFilename))
+      browser.paths.packages.map((p) => resolvePath(archivesPath, p.archiveFilename)),
+      logger
     );

     const invalidChecksums: string[] = [];
@@ -53,21 +56,24 @@ async function ensureDownloaded(browsers: BrowserDownload[]) {
       const path = resolvePath(archivesPath, archiveFilename);

       if (existsSync(path) && (await md5(path)) === archiveChecksum) {
+        logger.info(`Browser archive exists in ${path}`);
         return;
       }

-      const downloadedChecksum = await download(url, path);
+      const downloadedChecksum = await download(url, path, logger);
       if (downloadedChecksum !== archiveChecksum) {
         invalidChecksums.push(`${url} => ${path}`);
       }
     });

     if (invalidChecksums.length) {
-      throw new Error(
+      const err = new Error(
         `Error downloading browsers, checksums incorrect for:\n - ${invalidChecksums.join(
           '\n - '
         )}`
       );
+      logger.error(err);
+      throw err;
     }
   });
 }
@@ -6,17 +6,6 @@

 import { Readable } from 'stream';

-/**
- * Log a message if the DEBUG environment variable is set
- */
-export function log(...args: any[]) {
-  if (process.env.DEBUG) {
-    // allow console log since this is off by default and only for debugging
-    // eslint-disable-next-line no-console
-    console.log(...args);
-  }
-}
-
 /**
  * Iterate an array asynchronously and in parallel
  */
@@ -4,10 +4,11 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { Legacy } from 'kibana';
+import { Observable } from 'rxjs';
 import { get } from 'lodash';
-import { CoreSetup } from 'src/core/server';
-import { ConfigType as ReportingConfigType } from '../../../../../plugins/reporting/server';
+import { map } from 'rxjs/operators';
+import { CoreSetup, PluginInitializerContext } from 'src/core/server';
+import { ReportingConfigType } from './schema';

 // make config.get() aware of the value type it returns
 interface Config<BaseType> {
@@ -39,7 +40,7 @@ interface Config<BaseType> {
 }

 interface KbnServerConfigType {
-  path: { data: string };
+  path: { data: Observable<string> };
   server: {
     basePath: string;
     host: string;
@@ -55,17 +56,16 @@ export interface ReportingConfig extends Config<ReportingConfigType> {
 }

 export const buildConfig = (
+  initContext: PluginInitializerContext<ReportingConfigType>,
   core: CoreSetup,
-  server: Legacy.Server,
   reportingConfig: ReportingConfigType
 ): ReportingConfig => {
-  const config = server.config();
   const { http } = core;
   const serverInfo = http.getServerInfo();

   const kbnConfig = {
     path: {
-      data: config.get('path.data'),
+      data: initContext.config.legacy.globalConfig$.pipe(map((c) => c.path.data)),
     },
     server: {
       basePath: core.http.basePath.serverBasePath,
@@ -84,5 +84,3 @@ export const buildConfig = (
     },
   };
 };
-
-export { ReportingConfigType };
@@ -5,9 +5,10 @@
  */

 import * as Rx from 'rxjs';
-import { CoreSetup, Logger, PluginInitializerContext } from 'src/core/server';
-import { ConfigType as ReportingConfigType } from './schema';
+import { CoreSetup, PluginInitializerContext } from 'src/core/server';
+import { LevelLogger } from '../lib';
 import { createConfig$ } from './create_config';
+import { ReportingConfigType } from './schema';

 interface KibanaServer {
   host?: string;
@@ -37,14 +38,14 @@ const makeMockCoreSetup = (serverInfo: KibanaServer): CoreSetup =>
 describe('Reporting server createConfig$', () => {
   let mockCoreSetup: CoreSetup;
   let mockInitContext: PluginInitializerContext;
-  let mockLogger: Logger;
+  let mockLogger: LevelLogger;

   beforeEach(() => {
     mockCoreSetup = makeMockCoreSetup({ host: 'kibanaHost', port: 5601, protocol: 'http' });
     mockInitContext = makeMockInitContext({
       kibanaServer: {},
     });
-    mockLogger = ({ warn: jest.fn(), debug: jest.fn() } as unknown) as Logger;
+    mockLogger = ({ warn: jest.fn(), debug: jest.fn() } as unknown) as LevelLogger;
   });

   afterEach(() => {
@@ -52,7 +53,8 @@ describe('Reporting server createConfig$', () => {
   });

   it('creates random encryption key and default config using host, protocol, and port from server info', async () => {
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();

     expect(result.encryptionKey).toMatch(/\S{32,}/); // random 32 characters
     expect(result.kibanaServer).toMatchInlineSnapshot(`
@@ -73,8 +75,8 @@ describe('Reporting server createConfig$', () => {
       encryptionKey: 'iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii',
       kibanaServer: {},
     });
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
-
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();
     expect(result.encryptionKey).toMatch('iiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiii');
     expect((mockLogger.warn as any).mock.calls.length).toBe(0);
   });
@@ -88,7 +90,8 @@ describe('Reporting server createConfig$', () => {
         protocol: 'httpsa',
       },
     });
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();

     expect(result).toMatchInlineSnapshot(`
       Object {
@@ -115,7 +118,8 @@ describe('Reporting server createConfig$', () => {
       encryptionKey: 'aaaaaaaaaaaaabbbbbbbbbbbbaaaaaaaaa',
       kibanaServer: { hostname: '0' },
     });
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();

     expect(result.kibanaServer).toMatchInlineSnapshot(`
       Object {
@@ -136,7 +140,8 @@ describe('Reporting server createConfig$', () => {
       encryptionKey: '888888888888888888888888888888888',
       capture: { browser: { chromium: { disableSandbox: false } } },
     } as ReportingConfigType);
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();

     expect(result.capture.browser.chromium).toMatchObject({ disableSandbox: false });
     expect((mockLogger.warn as any).mock.calls.length).toBe(0);
@@ -147,7 +152,8 @@ describe('Reporting server createConfig$', () => {
       encryptionKey: '888888888888888888888888888888888',
       capture: { browser: { chromium: { disableSandbox: true } } },
     } as ReportingConfigType);
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();

     expect(result.capture.browser.chromium).toMatchObject({ disableSandbox: true });
     expect((mockLogger.warn as any).mock.calls.length).toBe(0);
@@ -157,7 +163,8 @@ describe('Reporting server createConfig$', () => {
     mockInitContext = makeMockInitContext({
       encryptionKey: '888888888888888888888888888888888',
     } as ReportingConfigType);
-    const result = await createConfig$(mockCoreSetup, mockInitContext, mockLogger).toPromise();
+    const mockConfig$: any = mockInitContext.config.create();
+    const result = await createConfig$(mockCoreSetup, mockConfig$, mockLogger).toPromise();

     expect(result.capture.browser.chromium).toMatchObject({ disableSandbox: expect.any(Boolean) });
     expect((mockLogger.warn as any).mock.calls.length).toBe(0);
@@ -5,13 +5,14 @@
  */

 import { i18n } from '@kbn/i18n/';
-import { TypeOf } from '@kbn/config-schema';
 import crypto from 'crypto';
 import { capitalize } from 'lodash';
+import { Observable } from 'rxjs';
 import { map, mergeMap } from 'rxjs/operators';
-import { CoreSetup, Logger, PluginInitializerContext } from 'src/core/server';
+import { CoreSetup } from 'src/core/server';
+import { LevelLogger } from '../lib';
 import { getDefaultChromiumSandboxDisabled } from './default_chromium_sandbox_disabled';
-import { ConfigSchema } from './schema';
+import { ReportingConfigType } from './schema';

 /*
  * Set up dynamic config defaults
@@ -19,8 +20,12 @@ import { ConfigSchema } from './schema';
  * - xpack.kibanaServer
  * - xpack.reporting.encryptionKey
  */
-export function createConfig$(core: CoreSetup, context: PluginInitializerContext, logger: Logger) {
-  return context.config.create<TypeOf<typeof ConfigSchema>>().pipe(
+export function createConfig$(
+  core: CoreSetup,
+  config$: Observable<ReportingConfigType>,
+  logger: LevelLogger
+) {
+  return config$.pipe(
     map((config) => {
       // encryption key
       let encryptionKey = config.encryptionKey;
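(Editor's aside: with this change `createConfig$` takes a plain `Observable<ReportingConfigType>` instead of reaching into the initializer context itself. A consumption sketch — the wiring below is assumed for illustration, mirroring the legacy shim earlier in this diff:)

    import { first } from 'rxjs/operators';

    // The caller now owns the raw config observable and hands it in.
    const config$ = initContext.config.create<ReportingConfigType>();
    const reportingConfig = await createConfig$(coreSetup, config$, levelLogger)
      .pipe(first())
      .toPromise();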
@@ -5,11 +5,12 @@
  */

 import { PluginConfigDescriptor } from 'kibana/server';
-import { ConfigSchema, ConfigType } from './schema';
-
+import { ConfigSchema, ReportingConfigType } from './schema';
 export { buildConfig } from './config';
 export { createConfig$ } from './create_config';
+export { ConfigSchema, ReportingConfigType };

-export const config: PluginConfigDescriptor<ConfigType> = {
+export const config: PluginConfigDescriptor<ReportingConfigType> = {
   exposeToBrowser: { poll: true },
   schema: ConfigSchema,
   deprecations: ({ unused }) => [
@@ -20,5 +21,3 @@ export const config: PluginConfigDescriptor<ConfigType> = {
     unused('kibanaApp'),
   ],
 };
-
-export { ConfigSchema, ConfigType };
@@ -172,4 +172,4 @@ export const ConfigSchema = schema.object({
   poll: PollSchema,
 });

-export type ConfigType = TypeOf<typeof ConfigSchema>;
+export type ReportingConfigType = TypeOf<typeof ConfigSchema>;
@@ -5,22 +5,22 @@
  */

 import * as Rx from 'rxjs';
-import { first, mapTo, map } from 'rxjs/operators';
+import { first, map, mapTo } from 'rxjs/operators';
 import {
+  BasePath,
   ElasticsearchServiceSetup,
+  IRouter,
   KibanaRequest,
+  SavedObjectsClientContract,
   SavedObjectsServiceStart,
   UiSettingsServiceStart,
-  IRouter,
-  SavedObjectsClientContract,
-  BasePath,
 } from 'src/core/server';
-import { SecurityPluginSetup } from '../../../../plugins/security/server';
-import { LicensingPluginSetup } from '../../../../plugins/licensing/server';
-import { screenshotsObservableFactory } from '../export_types/common/lib/screenshots';
+import { LicensingPluginSetup } from '../../licensing/server';
+import { SecurityPluginSetup } from '../../security/server';
 import { ScreenshotsObservableFn } from '../server/types';
 import { ReportingConfig } from './';
 import { HeadlessChromiumDriverFactory } from './browsers/chromium/driver_factory';
+import { screenshotsObservableFactory } from './export_types/common/lib/screenshots';
 import { checkLicense, getExportTypesRegistry } from './lib';
 import { ESQueueInstance } from './lib/create_queue';
 import { EnqueueJobFn } from './lib/enqueue_job';
@@ -31,7 +31,7 @@ export interface ReportingInternalSetup {
   licensing: LicensingPluginSetup;
   basePath: BasePath['get'];
   router: IRouter;
-  security: SecurityPluginSetup;
+  security?: SecurityPluginSetup;
 }

 interface ReportingInternalStart {
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { cryptoFactory, LevelLogger } from '../../../server/lib';
+import { cryptoFactory, LevelLogger } from '../../../lib';
 import { decryptJobHeaders } from './decrypt_job_headers';

 const encryptHeaders = async (encryptionKey: string, headers: Record<string, string>) => {
@@ -5,7 +5,7 @@
  */

 import { i18n } from '@kbn/i18n';
-import { cryptoFactory, LevelLogger } from '../../../server/lib';
+import { cryptoFactory, LevelLogger } from '../../../lib';

 interface HasEncryptedHeaders {
   headers?: string;
@@ -5,9 +5,10 @@
  */

 import sinon from 'sinon';
-import { ReportingConfig, ReportingCore } from '../../../server';
-import { JobDocPayload } from '../../../server/types';
+import { ReportingConfig } from '../../../';
+import { ReportingCore } from '../../../core';
 import { createMockReportingCore } from '../../../test_helpers';
+import { JobDocPayload } from '../../../types';
 import { JobDocPayloadPDF } from '../../printable_pdf/types';
 import { getConditionalHeaders, getCustomLogo } from './index';

@@ -4,8 +4,8 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { ReportingConfig } from '../../../server';
-import { ConditionalHeaders } from '../../../server/types';
+import { ReportingConfig } from '../../../';
+import { ConditionalHeaders } from '../../../types';

 export const getConditionalHeaders = <JobDocPayloadType>({
   config,
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { ReportingCore } from '../../../server';
+import { ReportingCore } from '../../../core';
 import { createMockReportingCore } from '../../../test_helpers';
 import { JobDocPayloadPDF } from '../../printable_pdf/types';
 import { getConditionalHeaders, getCustomLogo } from './index';
@@ -4,9 +4,9 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../common/constants';
-import { ReportingConfig, ReportingCore } from '../../../server';
-import { ConditionalHeaders } from '../../../server/types';
+import { ReportingConfig, ReportingCore } from '../../../';
+import { UI_SETTINGS_CUSTOM_PDF_LOGO } from '../../../../common/constants';
+import { ConditionalHeaders } from '../../../types';
 import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only

 export const getCustomLogo = async ({
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { ReportingConfig } from '../../../server';
+import { ReportingConfig } from '../../../';
 import { JobDocPayloadPNG } from '../../png/types';
 import { JobDocPayloadPDF } from '../../printable_pdf/types';
 import { getFullUrls } from './get_full_urls';
@@ -7,12 +7,12 @@
 import {
   format as urlFormat,
   parse as urlParse,
-  UrlWithStringQuery,
   UrlWithParsedQuery,
+  UrlWithStringQuery,
 } from 'url';
-import { getAbsoluteUrlFactory } from '../../../common/get_absolute_url';
-import { validateUrls } from '../../../common/validate_urls';
-import { ReportingConfig } from '../../../server';
+import { ReportingConfig } from '../../..';
+import { getAbsoluteUrlFactory } from '../../../../common/get_absolute_url';
+import { validateUrls } from '../../../../common/validate_urls';
 import { JobDocPayloadPNG } from '../../png/types';
 import { JobDocPayloadPDF } from '../../printable_pdf/types';
@@ -7,7 +7,7 @@ import { omit } from 'lodash';
 import {
   KBN_SCREENSHOT_HEADER_BLACKLIST,
   KBN_SCREENSHOT_HEADER_BLACKLIST_STARTS_WITH_PATTERN,
-} from '../../../common/constants';
+} from '../../../../common/constants';

 export const omitBlacklistedHeaders = <JobDocPayloadType>({
   job,
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { CaptureConfig } from '../../../server/types';
+import { CaptureConfig } from '../../../types';
 import { LayoutParams, LayoutTypes } from './';
 import { Layout } from './layout';
 import { PreserveLayout } from './preserve_layout';
@@ -4,8 +4,8 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { HeadlessChromiumDriver } from '../../../server/browsers';
-import { LevelLogger } from '../../../server/lib';
+import { HeadlessChromiumDriver } from '../../../browsers';
+import { LevelLogger } from '../../../lib';
 import { Layout } from './layout';

 export { createLayout } from './create_layout';
@@ -6,9 +6,9 @@

 import path from 'path';
 import { EvaluateFn, SerializableOrJSHandle } from 'puppeteer';
-import { LevelLogger } from '../../../server/lib';
-import { HeadlessChromiumDriver } from '../../../server/browsers';
-import { CaptureConfig } from '../../../server/types';
+import { CaptureConfig } from '../../../types';
+import { HeadlessChromiumDriver } from '../../../browsers';
+import { LevelLogger } from '../../../lib';
 import { getDefaultLayoutSelectors, LayoutSelectorDictionary, Size, LayoutTypes } from './';
 import { Layout } from './layout';
@@ -5,9 +5,9 @@
  */

 import { i18n } from '@kbn/i18n';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { AttributesMap, ElementsPositionAndAttribute } from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
+import { AttributesMap, ElementsPositionAndAttribute } from '../../../../types';
 import { LayoutInstance } from '../../layouts';
 import { CONTEXT_ELEMENTATTRIBUTES } from './constants';
@@ -5,9 +5,9 @@
  */

 import { i18n } from '@kbn/i18n';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { CaptureConfig } from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
+import { CaptureConfig } from '../../../../types';
 import { LayoutInstance } from '../../layouts';
 import { CONTEXT_GETNUMBEROFITEMS, CONTEXT_READMETADATA } from './constants';
@@ -5,9 +5,9 @@
  */

 import { i18n } from '@kbn/i18n';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { ElementsPositionAndAttribute, Screenshot } from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
+import { ElementsPositionAndAttribute, Screenshot } from '../../../../types';

 export const getScreenshots = async (
   browser: HeadlessChromiumDriver,
@@ -4,8 +4,8 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
 import { LayoutInstance } from '../../layouts';
 import { CONTEXT_GETTIMERANGE } from './constants';
@@ -7,8 +7,8 @@
 import { i18n } from '@kbn/i18n';
 import fs from 'fs';
 import { promisify } from 'util';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
 import { Layout } from '../../layouts/layout';
 import { CONTEXT_INJECTCSS } from './constants';
@@ -4,7 +4,7 @@
  * you may not use this file except in compliance with the Elastic License.
  */

-jest.mock('../../../../server/browsers/chromium/puppeteer', () => ({
+jest.mock('../../../../browsers/chromium/puppeteer', () => ({
   puppeteerLaunch: () => ({
     // Fixme needs event emitters
     newPage: () => ({
@@ -18,14 +18,10 @@ jest.mock('../../../../server/browsers/chromium/puppeteer', () => ({
 import * as Rx from 'rxjs';
 // eslint-disable-next-line @kbn/eslint/no-restricted-paths
 import { loggingServiceMock } from '../../../../../../../../src/core/server/mocks';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { LevelLogger } from '../../../../server/lib';
-import {
-  CaptureConfig,
-  ConditionalHeaders,
-  ElementsPositionAndAttribute,
-} from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger } from '../../../../lib';
 import { createMockBrowserDriverFactory, createMockLayoutInstance } from '../../../../test_helpers';
+import { CaptureConfig, ConditionalHeaders, ElementsPositionAndAttribute } from '../../../../types';
 import * as contexts from './constants';
 import { screenshotsObservableFactory } from './observable';
@@ -16,14 +16,14 @@ import {
   tap,
   toArray,
 } from 'rxjs/operators';
-import { HeadlessChromiumDriverFactory } from '../../../../server/browsers';
+import { HeadlessChromiumDriverFactory } from '../../../../browsers';
 import {
   CaptureConfig,
   ElementsPositionAndAttribute,
   ScreenshotObservableOpts,
   ScreenshotResults,
   ScreenshotsObservableFn,
-} from '../../../../server/types';
+} from '../../../../types';
 import { DEFAULT_PAGELOAD_SELECTOR } from '../../constants';
 import { getElementPositionAndAttributes } from './get_element_position_data';
 import { getNumberOfItems } from './get_number_of_items';
@@ -5,9 +5,9 @@
  */

 import { i18n } from '@kbn/i18n';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { CaptureConfig, ConditionalHeaders } from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
+import { CaptureConfig, ConditionalHeaders } from '../../../../types';

 export const openUrl = async (
   captureConfig: CaptureConfig,
@@ -5,9 +5,9 @@
  */

 import { i18n } from '@kbn/i18n';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { CaptureConfig } from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
+import { CaptureConfig } from '../../../../types';
 import { LayoutInstance } from '../../layouts';
 import { CONTEXT_WAITFORRENDER } from './constants';
@@ -5,9 +5,9 @@
  */

 import { i18n } from '@kbn/i18n';
-import { LevelLogger, startTrace } from '../../../../server/lib';
-import { HeadlessChromiumDriver } from '../../../../server/browsers';
-import { CaptureConfig } from '../../../../server/types';
+import { HeadlessChromiumDriver } from '../../../../browsers';
+import { LevelLogger, startTrace } from '../../../../lib';
+import { CaptureConfig } from '../../../../types';
 import { LayoutInstance } from '../../layouts';
 import { CONTEXT_WAITFORELEMENTSTOBEINDOM } from './constants';
@@ -5,19 +5,15 @@
  */

 import {
-  CSV_JOB_TYPE as jobType,
   LICENSE_TYPE_BASIC,
   LICENSE_TYPE_ENTERPRISE,
   LICENSE_TYPE_GOLD,
   LICENSE_TYPE_PLATINUM,
   LICENSE_TYPE_STANDARD,
   LICENSE_TYPE_TRIAL,
-} from '../../common/constants';
-import {
-  ESQueueCreateJobFn,
-  ESQueueWorkerExecuteFn,
-  ExportTypeDefinition,
-} from '../../server/types';
+} from '../../../common/constants';
+import { CSV_JOB_TYPE as jobType } from '../../../constants';
+import { ESQueueCreateJobFn, ESQueueWorkerExecuteFn, ExportTypeDefinition } from '../../types';
 import { metadata } from './metadata';
 import { createJobFactory } from './server/create_job';
 import { executeJobFactory } from './server/execute_job';
@@ -5,9 +5,9 @@
  */

 import { KibanaRequest, RequestHandlerContext } from 'src/core/server';
-import { ReportingCore } from '../../../server';
-import { cryptoFactory } from '../../../server/lib';
-import { CreateJobFactory, ESQueueCreateJobFn } from '../../../server/types';
+import { ReportingCore } from '../../../';
+import { cryptoFactory } from '../../../lib';
+import { CreateJobFactory, ESQueueCreateJobFn } from '../../../types';
 import { JobParamsDiscoverCsv } from '../types';

 export const createJobFactory: CreateJobFactory<ESQueueCreateJobFn<
@@ -9,10 +9,10 @@ import nodeCrypto from '@elastic/node-crypto';
 import Puid from 'puid';
 import sinon from 'sinon';
 import { fieldFormats } from '../../../../../../../src/plugins/data/server';
-import { CancellationToken } from '../../../../../../plugins/reporting/common';
-import { CSV_BOM_CHARS } from '../../../common/constants';
-import { LevelLogger } from '../../../server/lib';
-import { setFieldFormats } from '../../../server/services';
+import { CancellationToken } from '../../../../common';
+import { CSV_BOM_CHARS } from '../../../../common/constants';
+import { LevelLogger } from '../../../lib';
+import { setFieldFormats } from '../../../services';
 import { createMockReportingCore } from '../../../test_helpers';
 import { JobDocPayloadDiscoverCsv } from '../types';
 import { executeJobFactory } from './execute_job';
@@ -45,7 +45,7 @@ describe('CSV Execute Job', function () {
   let clusterStub: any;
   let configGetStub: any;
   let mockReportingConfig: any;
-  let mockReportingPlugin: any;
+  let mockReportingCore: any;
   let callAsCurrentUserStub: any;
   let cancellationToken: any;

@@ -73,9 +73,10 @@ describe('CSV Execute Job', function () {
     configGetStub.withArgs('csv', 'scroll').returns({});
     mockReportingConfig = { get: configGetStub, kbnConfig: { get: configGetStub } };

-    mockReportingPlugin = await createMockReportingCore(mockReportingConfig);
-    mockReportingPlugin.getUiSettingsServiceFactory = () => Promise.resolve(mockUiSettingsClient);
-    mockReportingPlugin.getElasticsearchService = () => Promise.resolve(mockElasticsearch);
+    mockReportingCore = await createMockReportingCore(mockReportingConfig);
+    mockReportingCore.getUiSettingsServiceFactory = () => Promise.resolve(mockUiSettingsClient);
+    mockReportingCore.getElasticsearchService = () => Promise.resolve(mockElasticsearch);
+    mockReportingCore.config = mockReportingConfig;

     cancellationToken = new CancellationToken();

@@ -116,7 +117,7 @@ describe('CSV Execute Job', function () {

   describe('basic Elasticsearch call behavior', function () {
     it('should decrypt encrypted headers and pass to callAsCurrentUser', async function () {
-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       await executeJob(
         'job456',
         getJobDocPayload({
@@ -136,7 +137,7 @@ describe('CSV Execute Job', function () {
         testBody: true,
       };

-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       const job = getJobDocPayload({
         headers: encryptedHeaders,
         fields: [],
@@ -163,7 +164,7 @@ describe('CSV Execute Job', function () {
         _scroll_id: scrollId,
       });
       callAsCurrentUserStub.onSecondCall().resolves(defaultElasticsearchResponse);
-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       await executeJob(
         'job456',
         getJobDocPayload({
@@ -181,7 +182,7 @@ describe('CSV Execute Job', function () {
     });

     it('should not execute scroll if there are no hits from the search', async function () {
-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       await executeJob(
         'job456',
         getJobDocPayload({
@@ -215,7 +216,7 @@ describe('CSV Execute Job', function () {
         _scroll_id: 'scrollId',
       });

-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       await executeJob(
         'job456',
         getJobDocPayload({
@@ -254,7 +255,7 @@ describe('CSV Execute Job', function () {
         _scroll_id: lastScrollId,
       });

-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       await executeJob(
         'job456',
         getJobDocPayload({
@@ -286,7 +287,7 @@ describe('CSV Execute Job', function () {
         _scroll_id: lastScrollId,
      });

-      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
+      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
       const jobParams = getJobDocPayload({
         headers: encryptedHeaders,
         fields: ['one', 'two'],
@@ -313,7 +314,7 @@ describe('CSV Execute Job', function () {
         _scroll_id: 'scrollId',
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -338,7 +339,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['=SUM(A1:A2)', 'two'],
|
||||
|
@ -364,7 +365,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -390,7 +391,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['=SUM(A1:A2)', 'two'],
|
||||
|
@ -416,7 +417,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -443,7 +444,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -464,7 +465,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -487,7 +488,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -508,7 +509,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -524,7 +525,7 @@ describe('CSV Execute Job', function () {
|
|||
describe('Elasticsearch call errors', function () {
|
||||
it('should reject Promise if search call errors out', async function () {
|
||||
callAsCurrentUserStub.rejects(new Error());
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: [],
|
||||
|
@ -543,7 +544,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: 'scrollId',
|
||||
});
|
||||
callAsCurrentUserStub.onSecondCall().rejects(new Error());
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: [],
|
||||
|
@ -564,7 +565,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: undefined,
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: [],
|
||||
|
@ -585,7 +586,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: undefined,
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: [],
|
||||
|
@ -613,7 +614,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: undefined,
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: [],
|
||||
|
@ -641,7 +642,7 @@ describe('CSV Execute Job', function () {
|
|||
_scroll_id: undefined,
|
||||
});
|
||||
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: [],
|
||||
|
@ -677,7 +678,7 @@ describe('CSV Execute Job', function () {
|
|||
});
|
||||
|
||||
it('should stop calling Elasticsearch when cancellationToken.cancel is called', async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
executeJob(
|
||||
'job345',
|
||||
getJobDocPayload({
|
||||
|
@ -696,7 +697,7 @@ describe('CSV Execute Job', function () {
|
|||
});
|
||||
|
||||
it(`shouldn't call clearScroll if it never got a scrollId`, async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
executeJob(
|
||||
'job345',
|
||||
getJobDocPayload({
|
||||
|
@ -714,7 +715,7 @@ describe('CSV Execute Job', function () {
|
|||
});
|
||||
|
||||
it('should call clearScroll if it got a scrollId', async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
executeJob(
|
||||
'job345',
|
||||
getJobDocPayload({
|
||||
|
@ -736,7 +737,7 @@ describe('CSV Execute Job', function () {
|
|||
|
||||
describe('csv content', function () {
|
||||
it('should write column headers to output, even if there are no results', async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -748,7 +749,7 @@ describe('CSV Execute Job', function () {
|
|||
|
||||
it('should use custom uiSettings csv:separator for header', async function () {
|
||||
mockUiSettingsClient.get.withArgs('csv:separator').returns(';');
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one', 'two'],
|
||||
|
@ -760,7 +761,7 @@ describe('CSV Execute Job', function () {
|
|||
|
||||
it('should escape column headers if uiSettings csv:quoteValues is true', async function () {
|
||||
mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(true);
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one and a half', 'two', 'three-and-four', 'five & six'],
|
||||
|
@ -772,7 +773,7 @@ describe('CSV Execute Job', function () {
|
|||
|
||||
it(`shouldn't escape column headers if uiSettings csv:quoteValues is false`, async function () {
|
||||
mockUiSettingsClient.get.withArgs('csv:quoteValues').returns(false);
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
const jobParams = getJobDocPayload({
|
||||
headers: encryptedHeaders,
|
||||
fields: ['one and a half', 'two', 'three-and-four', 'five & six'],
|
||||
|
@ -783,7 +784,7 @@ describe('CSV Execute Job', function () {
|
|||
});
|
||||
|
||||
it('should write column headers to output, when there are results', async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
callAsCurrentUserStub.onFirstCall().resolves({
|
||||
hits: {
|
||||
hits: [{ one: '1', two: '2' }],
|
||||
|
@ -803,7 +804,7 @@ describe('CSV Execute Job', function () {
|
|||
});
|
||||
|
||||
it('should use comma separated values of non-nested fields from _source', async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
callAsCurrentUserStub.onFirstCall().resolves({
|
||||
hits: {
|
||||
hits: [{ _source: { one: 'foo', two: 'bar' } }],
|
||||
|
@ -824,7 +825,7 @@ describe('CSV Execute Job', function () {
|
|||
});
|
||||
|
||||
it('should concatenate the hits from multiple responses', async function () {
|
||||
const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
|
||||
const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
|
||||
callAsCurrentUserStub.onFirstCall().resolves({
|
||||
hits: {
|
||||
hits: [{ _source: { one: 'foo', two: 'bar' } }],
|
||||
|
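The hunks above cover the executor's paging behavior: an initial search, then repeated scroll calls until a page comes back empty, with clearScroll issued only when a scroll id was ever received. A minimal sketch of that loop, assuming a legacy-client-style callAsCurrentUser and a placeholder scroll duration (the real value comes from the csv.scroll config):

```ts
// Sketch of the search/scroll loop these tests exercise; not the real executor,
// which also handles formatting, size limits, and cancellation.
type CallCluster = (endpoint: 'search' | 'scroll' | 'clearScroll', params: object) => Promise<any>;

async function fetchAllHits(callAsCurrentUser: CallCluster, searchParams: object) {
  const rows: unknown[] = [];
  let scrollId: string | undefined;
  let response = await callAsCurrentUser('search', searchParams);
  try {
    while (response && response.hits && response.hits.hits.length > 0) {
      rows.push(...response.hits.hits);
      scrollId = response._scroll_id;
      response = await callAsCurrentUser('scroll', { scrollId, scroll: '30s' }); // duration assumed
    }
  } finally {
    // mirrors the tests: clearScroll runs only if a scroll id was ever received
    if (scrollId) {
      await callAsCurrentUser('clearScroll', { scrollId: [scrollId] });
    }
  }
  return rows;
}
```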
@ -852,7 +853,7 @@ describe('CSV Execute Job', function () {
    });

    it('should use field formatters to format fields', async function () {
      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      callAsCurrentUserStub.onFirstCall().resolves({
        hits: {
          hits: [{ _source: { one: 'foo', two: 'bar' } }],

@ -894,7 +895,7 @@ describe('CSV Execute Job', function () {
    beforeEach(async function () {
      configGetStub.withArgs('csv', 'maxSizeBytes').returns(1);

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],
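This and the following hunks exercise the csv.maxSizeBytes budget: output grows until the configured byte limit is spent, after which the job reports max_size_reached alongside the truncated content. A sketch of that behavior (the real builder in the plugin differs in detail):

```ts
// Byte-budgeted string builder: appends succeed until maxSizeBytes is exhausted.
class SizeCappedBuilder {
  private output = '';
  private usedBytes = 0;
  public maxSizeReached = false;

  constructor(private readonly maxSizeBytes: number) {}

  tryAppend(chunk: string): boolean {
    const chunkBytes = Buffer.byteLength(chunk, 'utf8');
    if (this.usedBytes + chunkBytes > this.maxSizeBytes) {
      this.maxSizeReached = true; // reported with the truncated content
      return false;
    }
    this.output += chunk;
    this.usedBytes += chunkBytes;
    return true;
  }

  getString(): string {
    return this.output;
  }
}

// With maxSizeBytes = 9, the header "one,two\n" (8 bytes) fits but the first
// data row does not, matching the size-limit cases in the tests above.
const builder = new SizeCappedBuilder(9);
builder.tryAppend('one,two\n');
builder.tryAppend('foo,bar\n');
console.log(builder.getString(), builder.maxSizeReached); // "one,two\n" true
```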
@ -924,7 +925,7 @@ describe('CSV Execute Job', function () {
    beforeEach(async function () {
      configGetStub.withArgs('csv', 'maxSizeBytes').returns(9);

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],

@ -961,7 +962,7 @@ describe('CSV Execute Job', function () {
        _scroll_id: 'scrollId',
      });

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],

@ -990,7 +991,7 @@ describe('CSV Execute Job', function () {
      let maxSizeReached: boolean;

      beforeEach(async function () {
        mockReportingPlugin.getUiSettingsServiceFactory = () => mockUiSettingsClient;
        mockReportingCore.getUiSettingsServiceFactory = () => mockUiSettingsClient;
        configGetStub.withArgs('csv', 'maxSizeBytes').returns(18);

        callAsCurrentUserStub.onFirstCall().returns({

@ -1000,7 +1001,7 @@ describe('CSV Execute Job', function () {
        _scroll_id: 'scrollId',
      });

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],

@ -1037,7 +1038,7 @@ describe('CSV Execute Job', function () {
        _scroll_id: 'scrollId',
      });

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],

@ -1063,7 +1064,7 @@ describe('CSV Execute Job', function () {
        _scroll_id: 'scrollId',
      });

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],

@ -1089,7 +1090,7 @@ describe('CSV Execute Job', function () {
        _scroll_id: 'scrollId',
      });

      const executeJob = await executeJobFactory(mockReportingPlugin, mockLogger);
      const executeJob = await executeJobFactory(mockReportingCore, mockLogger);
      const jobParams = getJobDocPayload({
        headers: encryptedHeaders,
        fields: ['one', 'two'],
|
@ -7,11 +7,11 @@
import { i18n } from '@kbn/i18n';
import Hapi from 'hapi';
import { IUiSettingsClient, KibanaRequest } from '../../../../../../../src/core/server';
import { CSV_BOM_CHARS, CSV_JOB_TYPE } from '../../../common/constants';
import { ReportingCore } from '../../../server';
import { cryptoFactory, LevelLogger } from '../../../server/lib';
import { getFieldFormats } from '../../../server/services';
import { ESQueueWorkerExecuteFn, ExecuteJobFactory } from '../../../server/types';
import { ReportingCore } from '../../..';
import { CSV_BOM_CHARS, CSV_JOB_TYPE } from '../../../../common/constants';
import { getFieldFormats } from '../../../../server/services';
import { cryptoFactory, LevelLogger } from '../../../lib';
import { ESQueueWorkerExecuteFn, ExecuteJobFactory } from '../../../types';
import { JobDocPayloadDiscoverCsv } from '../types';
import { fieldFormatMapFactory } from './lib/field_format_map';
import { createGenerateCsv } from './lib/generate_csv';
|
@ -5,7 +5,7 @@
 */

import { startsWith } from 'lodash';
import { CSV_FORMULA_CHARS } from '../../../../common/constants';
import { CSV_FORMULA_CHARS } from '../../../../../common/constants';

export const cellHasFormulas = (val: string) =>
  CSV_FORMULA_CHARS.some((formulaChar) => startsWith(val, formulaChar));
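A standalone sketch of the guard above; CSV_FORMULA_CHARS is assumed here to be the usual spreadsheet trigger characters (the real list lives in common/constants), and the quote-prefix mitigation is illustrative only — the actual job may flag such cells rather than rewrite them:

```ts
import { startsWith } from 'lodash';

const CSV_FORMULA_CHARS = ['=', '+', '-', '@']; // assumption, not the real constant

const cellHasFormulas = (val: string) =>
  CSV_FORMULA_CHARS.some((formulaChar) => startsWith(val, formulaChar));

// Prefixing with a quote makes spreadsheet apps treat the cell as text.
const escapeFormulaCell = (val: string) => (cellHasFormulas(val) ? `'${val}` : val);

console.log(escapeFormulaCell('=SUM(A1:A2)')); // "'=SUM(A1:A2)"
console.log(escapeFormulaCell('plain value')); // unchanged
```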
Some files were not shown because too many files have changed in this diff.