Mirror of https://github.com/elastic/kibana.git
[Reporting/PDF] Refactor screenshot pipeline for multi-url by default (#48588)
* Multi-url pass to screenshotsObservable
* Restore "first" operator
* max attempt = 1 for testing
* cleanup debug
* restore more concatMap

Co-authored-by: Elastic Machine <elasticmachine@users.noreply.github.com>
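The central change is that the screenshot pipeline now takes a list of urls and emits a single array of per-url results. A minimal sketch of that "multi-url pass" pattern (hypothetical names, not the Kibana source), serializing captures with concatMap and collecting them with toArray:

import * as Rx from 'rxjs';
import { concatMap, toArray } from 'rxjs/operators';

interface ScreenshotResult {
  url: string;
  base64EncodedData: string;
}

// Hypothetical single-url capture standing in for the real pipeline.
declare function captureOne(url: string): Rx.Observable<ScreenshotResult>;

function captureAll(urls: string[]): Rx.Observable<ScreenshotResult[]> {
  return Rx.from(urls).pipe(
    concatMap(url => captureOne(url)), // capture urls one at a time, preserving order
    toArray() // emit one ScreenshotResult[] once every url has been captured
  );
}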
parent 8d37637c07
commit fa8da7c349

22 changed files with 269 additions and 346 deletions
decrypt_job_headers.test.ts

@@ -43,7 +43,7 @@ describe('headers', () => {
     };
 
     const encryptedHeaders = await encryptHeaders(headers);
-    const { decryptedHeaders } = await decryptJobHeaders({
+    const decryptedHeaders = await decryptJobHeaders({
       job: {
         title: 'cool-job-bro',
         type: 'csv',
decrypt_job_headers.ts

@@ -17,22 +17,18 @@ export const decryptJobHeaders = async <
   JobParamsType,
   JobDocPayloadType extends HasEncryptedHeaders
 >({
-  job,
   server,
+  job,
   logger,
 }: {
-  job: JobDocPayloadType;
   server: ServerFacade;
+  job: JobDocPayloadType;
   logger: Logger;
-}): Promise<{
-  job: JobDocPayloadType;
-  server: ServerFacade;
-  decryptedHeaders: Record<string, string>;
-}> => {
+}): Promise<Record<string, string>> => {
   const crypto: CryptoFactory = cryptoFactory(server);
   try {
     const decryptedHeaders: Record<string, string> = await crypto.decrypt(job.headers);
-    return { job, decryptedHeaders, server };
+    return decryptedHeaders;
   } catch (err) {
     logger.error(err);
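decryptJobHeaders now resolves to the decrypted headers themselves rather than a pass-through bundle of job, server, and headers. A usage sketch under the new signature (server, job, and logger assumed in scope):

// Resolves to Record<string, string> on success.
const decryptedHeaders = await decryptJobHeaders({ server, job, logger });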
get_conditional_headers.test.ts

@@ -27,7 +27,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -44,7 +44,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -65,7 +65,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -82,7 +82,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -97,7 +97,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -120,7 +120,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -137,7 +137,7 @@ describe('conditions', () => {
       baz: 'quix',
     };
 
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: permittedHeaders,
       server: mockServer,
@@ -153,7 +153,7 @@ test('uses basePath from job when creating saved object service', async () => {
     baz: 'quix',
   };
 
-  const { conditionalHeaders } = await getConditionalHeaders({
+  const conditionalHeaders = await getConditionalHeaders({
     job: {} as JobDocPayload<any>,
     filteredHeaders: permittedHeaders,
     server: mockServer,
@@ -180,7 +180,7 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav
     baz: 'quix',
   };
 
-  const { conditionalHeaders } = await getConditionalHeaders({
+  const conditionalHeaders = await getConditionalHeaders({
    job: {} as JobDocPayload<any>,
     filteredHeaders: permittedHeaders,
     server: mockServer,
@@ -203,7 +203,7 @@ test(`uses basePath from server if job doesn't have a basePath when creating sav
 
 describe('config formatting', () => {
   test(`lowercases server.host`, async () => {
     mockServer = createMockServer({ settings: { 'server.host': 'COOL-HOSTNAME' } });
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {} as JobDocPayload<any>,
       filteredHeaders: {},
       server: mockServer,
@@ -215,7 +215,7 @@ describe('config formatting', () => {
     mockServer = createMockServer({
       settings: { 'xpack.reporting.kibanaServer.hostname': 'GREAT-HOSTNAME' },
     });
-    const { conditionalHeaders } = await getConditionalHeaders({
+    const conditionalHeaders = await getConditionalHeaders({
       job: {
         title: 'cool-job-bro',
         type: 'csv',
get_conditional_headers.ts

@@ -6,13 +6,13 @@
 import { ConditionalHeaders, ServerFacade } from '../../../types';
 
 export const getConditionalHeaders = <JobDocPayloadType>({
+  server,
   job,
   filteredHeaders,
-  server,
 }: {
+  server: ServerFacade;
   job: JobDocPayloadType;
   filteredHeaders: Record<string, string>;
-  server: ServerFacade;
 }) => {
   const config = server.config();
   const [hostname, port, basePath, protocol] = [
@@ -32,5 +32,5 @@ export const getConditionalHeaders = <JobDocPayloadType>({
     },
   };
 
-  return { job, conditionalHeaders, server };
+  return conditionalHeaders;
 };
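With each helper returning only the value it computes, the executors can chain them directly; a sketch (assumed signatures, inside an async executor with server, job, and logger in scope) of the hand-off:

// Each return value is exactly the next helper's input:
const decryptedHeaders = await decryptJobHeaders({ server, job, logger });
const filteredHeaders = omitBlacklistedHeaders({ job, decryptedHeaders });
const conditionalHeaders = getConditionalHeaders({ server, job, filteredHeaders });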
get_custom_logo.test.ts

@@ -19,7 +19,7 @@ test(`gets logo from uiSettings`, async () => {
     baz: 'quix',
   };
 
-  const { conditionalHeaders } = await getConditionalHeaders({
+  const conditionalHeaders = await getConditionalHeaders({
     job: {} as JobDocPayloadPDF,
     filteredHeaders: permittedHeaders,
     server: mockServer,
get_custom_logo.ts

@@ -9,13 +9,13 @@ import { ConditionalHeaders, ServerFacade } from '../../../types';
 import { JobDocPayloadPDF } from '../../printable_pdf/types'; // Logo is PDF only
 
 export const getCustomLogo = async ({
+  server,
   job,
   conditionalHeaders,
-  server,
 }: {
+  server: ServerFacade;
   job: JobDocPayloadPDF;
   conditionalHeaders: ConditionalHeaders;
-  server: ServerFacade;
 }) => {
   const serverBasePath: string = server.config().get('server.basePath');
 
@@ -38,12 +38,8 @@ export const getCustomLogo = async ({
   };
 
   const savedObjects = server.savedObjects;
-
   const savedObjectsClient = savedObjects.getScopedSavedObjectsClient(fakeRequest);
-
   const uiSettings = server.uiSettingsServiceFactory({ savedObjectsClient });
-
-  const logo = await uiSettings.get(UI_SETTINGS_CUSTOM_PDF_LOGO);
-
-  return { job, conditionalHeaders, logo, server };
+  const logo: string = await uiSettings.get(UI_SETTINGS_CUSTOM_PDF_LOGO);
+  return { conditionalHeaders, logo };
 };
get_full_urls.test.ts

@@ -22,29 +22,26 @@ beforeEach(() => {
 });
 
 test(`fails if no URL is passed`, async () => {
-  await expect(
+  const fn = () =>
     getFullUrls({
       job: {},
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: No valid URL fields found in Job Params! Expected \`job.relativeUrl\` or \`job.objects[{ relativeUrl }]\`]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"No valid URL fields found in Job Params! Expected \`job.relativeUrl\` or \`job.objects[{ relativeUrl }]\`"`
   );
 });
 
 test(`fails if URLs are file-protocols for PNGs`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
   const relativeUrl = 'file://etc/passwd/#/something';
-  await expect(
+  const fn = () =>
     getFullUrls({
-      job: {
-        relativeUrl,
-        forceNow,
-      },
+      job: { relativeUrl, forceNow },
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: Found invalid URL(s), all URLs must be relative: ${relativeUrl}]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"`
   );
 });
 
@@ -52,36 +49,26 @@ test(`fails if URLs are absolute for PNGs`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
   const relativeUrl =
     'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something';
-  await expect(
+  const fn = () =>
     getFullUrls({
-      job: {
-        relativeUrl,
-        forceNow,
-      },
+      job: { relativeUrl, forceNow },
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: Found invalid URL(s), all URLs must be relative: ${relativeUrl}]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"`
   );
 });
 
 test(`fails if URLs are file-protocols for PDF`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
   const relativeUrl = 'file://etc/passwd/#/something';
-  await expect(
+  const fn = () =>
     getFullUrls({
-      job: {
-        objects: [
-          {
-            relativeUrl,
-          },
-        ],
-        forceNow,
-      },
+      job: { objects: [{ relativeUrl }], forceNow },
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: Found invalid URL(s), all URLs must be relative: ${relativeUrl}]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"Found invalid URL(s), all URLs must be relative: file://etc/passwd/#/something"`
   );
 });
 
@@ -89,7 +76,7 @@ test(`fails if URLs are absolute for PDF`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
   const relativeUrl =
     'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something';
-  await expect(
+  const fn = () =>
     getFullUrls({
       job: {
         objects: [
@@ -100,59 +87,48 @@ test(`fails if URLs are absolute for PDF`, async () => {
         forceNow,
       },
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: Found invalid URL(s), all URLs must be relative: ${relativeUrl}]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something"`
   );
 });
 
 test(`fails if any URLs are absolute or file's for PDF`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
   const objects = [
-    {
-      relativeUrl: '/app/kibana#/something_aaa',
-    },
+    { relativeUrl: '/app/kibana#/something_aaa' },
     {
       relativeUrl:
         'http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something',
     },
-    {
-      relativeUrl: 'file://etc/passwd/#/something',
-    },
+    { relativeUrl: 'file://etc/passwd/#/something' },
   ];
-  await expect(
+
+  const fn = () =>
     getFullUrls({
-      job: {
-        objects,
-        forceNow,
-      },
+      job: { objects, forceNow },
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: Found invalid URL(s), all URLs must be relative: ${objects[1].relativeUrl} ${objects[2].relativeUrl}]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"Found invalid URL(s), all URLs must be relative: http://169.254.169.254/latest/meta-data/iam/security-credentials/profileName/#/something file://etc/passwd/#/something"`
   );
 });
 
 test(`fails if URL does not route to a visualization`, async () => {
-  await expect(
+  const fn = () =>
     getFullUrls({
-      job: {
-        relativeUrl: '/app/phoney',
-      },
+      job: { relativeUrl: '/app/phoney' },
       server: mockServer,
-    } as FullUrlsOpts)
-  ).rejects.toMatchInlineSnapshot(
-    `[Error: No valid hash in the URL! A hash is expected for the application to route to the intended visualization.]`
+    } as FullUrlsOpts);
+  expect(fn).toThrowErrorMatchingInlineSnapshot(
+    `"No valid hash in the URL! A hash is expected for the application to route to the intended visualization."`
   );
 });
 
 test(`adds forceNow to hash's query, if it exists`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
-  const { urls } = await getFullUrls({
-    job: {
-      relativeUrl: '/app/kibana#/something',
-      forceNow,
-    },
+  const urls = await getFullUrls({
+    job: { relativeUrl: '/app/kibana#/something', forceNow },
     server: mockServer,
   } as FullUrlsOpts);
 
@@ -164,11 +140,8 @@ test(`adds forceNow to hash's query, if it exists`, async () => {
 test(`appends forceNow to hash's query, if it exists`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
 
-  const { urls } = await getFullUrls({
-    job: {
-      relativeUrl: '/app/kibana#/something?_g=something',
-      forceNow,
-    },
+  const urls = await getFullUrls({
+    job: { relativeUrl: '/app/kibana#/something?_g=something', forceNow },
     server: mockServer,
   } as FullUrlsOpts);
 
@@ -178,10 +151,8 @@ test(`appends forceNow to hash's query, if it exists`, async () => {
 });
 
 test(`doesn't append forceNow query to url, if it doesn't exists`, async () => {
-  const { urls } = await getFullUrls({
-    job: {
-      relativeUrl: '/app/kibana#/something',
-    },
+  const urls = await getFullUrls({
+    job: { relativeUrl: '/app/kibana#/something' },
     server: mockServer,
   } as FullUrlsOpts);
 
@@ -190,7 +161,7 @@ test(`doesn't append forceNow query to url, if it doesn't exists`, async () => {
 
 test(`adds forceNow to each of multiple urls`, async () => {
   const forceNow = '2000-01-01T00:00:00.000Z';
-  const { urls } = await getFullUrls({
+  const urls = await getFullUrls({
     job: {
       objects: [
        { relativeUrl: '/app/kibana#/something_aaa' },
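Since getFullUrls is synchronous after this change, the failure tests switch from awaiting a rejected promise to asserting a synchronous throw. The Jest pattern in isolation (stand-in error text):

const fn = () => {
  throw new Error('boom'); // stand-in for a getFullUrls() validation failure
};
expect(fn).toThrowErrorMatchingInlineSnapshot(`"boom"`);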
get_full_urls.ts

@@ -12,7 +12,7 @@ import {
 } from 'url';
 import { getAbsoluteUrlFactory } from '../../../common/get_absolute_url';
 import { validateUrls } from '../../../common/validate_urls';
-import { ServerFacade, ConditionalHeaders } from '../../../types';
+import { ServerFacade } from '../../../types';
 import { JobDocPayloadPNG } from '../../png/types';
 import { JobDocPayloadPDF } from '../../printable_pdf/types';
 
@@ -23,15 +23,12 @@ function isPdfJob(job: JobDocPayloadPNG | JobDocPayloadPDF): job is JobDocPayloa
   return (job as JobDocPayloadPDF).objects !== undefined;
 }
 
-export async function getFullUrls<JobDocPayloadType>({
-  job,
+export function getFullUrls<JobDocPayloadType>({
   server,
-  ...mergeValues // pass-throughs
+  job,
 }: {
-  job: JobDocPayloadPDF | JobDocPayloadPNG;
   server: ServerFacade;
-  conditionalHeaders: ConditionalHeaders;
-  logo?: string;
+  job: JobDocPayloadPDF | JobDocPayloadPNG;
 }) {
   const config = server.config();
 
@@ -96,5 +93,5 @@ export async function getFullUrls<JobDocPayloadType>({
     });
   });
 
-  return { job, server, urls, ...mergeValues };
+  return urls;
 }
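Dropping async means callers receive the url array immediately; a sketch of the new call sites (server and job assumed in scope):

const urls: string[] = getFullUrls({ server, job }); // no await required
const hashUrl = urls[0]; // the PNG executor takes the first (only) entry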
omit_blacklisted_headers.test.ts

@@ -4,14 +4,8 @@
  * you may not use this file except in compliance with the Elastic License.
  */
 
-import { createMockServer } from '../../../test_helpers/create_mock_server';
 import { omitBlacklistedHeaders } from './index';
 
-let mockServer: any;
-beforeEach(() => {
-  mockServer = createMockServer('');
-});
-
 test(`omits blacklisted headers`, async () => {
   const permittedHeaders = {
     foo: 'bar',
@@ -27,7 +21,7 @@ test(`omits blacklisted headers`, async () => {
     'transfer-encoding': '',
   };
 
-  const { filteredHeaders } = await omitBlacklistedHeaders({
+  const filteredHeaders = await omitBlacklistedHeaders({
     job: {
       title: 'cool-job-bro',
       type: 'csv',
@@ -41,7 +35,6 @@ test(`omits blacklisted headers`, async () => {
       ...permittedHeaders,
      ...blacklistedHeaders,
     },
-    server: mockServer,
   });
 
   expect(filteredHeaders).toEqual(permittedHeaders);
omit_blacklisted_headers.ts

@@ -5,20 +5,17 @@
 */
 import { omit } from 'lodash';
 import { KBN_SCREENSHOT_HEADER_BLACKLIST } from '../../../common/constants';
-import { ServerFacade } from '../../../types';
 
 export const omitBlacklistedHeaders = <JobDocPayloadType>({
   job,
   decryptedHeaders,
-  server,
 }: {
   job: JobDocPayloadType;
   decryptedHeaders: Record<string, string>;
-  server: ServerFacade;
 }) => {
   const filteredHeaders: Record<string, string> = omit(
     decryptedHeaders,
     KBN_SCREENSHOT_HEADER_BLACKLIST
   );
-  return { job, filteredHeaders, server };
+  return filteredHeaders;
 };
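The helper is a thin wrapper over lodash's omit, which returns a new object without the blacklisted keys; for example:

import { omit } from 'lodash';

const headers = { foo: 'bar', 'transfer-encoding': 'chunked' };
const filtered = omit(headers, ['transfer-encoding']); // => { foo: 'bar' }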
screenshots/observable.ts

@@ -5,21 +5,9 @@
 */
 
 import * as Rx from 'rxjs';
-import { first, mergeMap } from 'rxjs/operators';
-import {
-  ServerFacade,
-  CaptureConfig,
-  HeadlessChromiumDriverFactory,
-  HeadlessChromiumDriver as HeadlessBrowser,
-} from '../../../../types';
-import {
-  ElementsPositionAndAttribute,
-  ScreenshotResults,
-  ScreenshotObservableOpts,
-  TimeRange,
-} from './types';
-
-import { checkForToastMessage } from './check_for_toast';
+import { first, mergeMap, toArray } from 'rxjs/operators';
+import { ServerFacade, CaptureConfig, HeadlessChromiumDriverFactory } from '../../../../types';
+import { ScreenshotResults, ScreenshotObservableOpts } from './types';
 import { injectCustomCss } from './inject_css';
 import { openUrl } from './open_url';
 import { waitForRenderComplete } from './wait_for_render';
@@ -28,6 +16,7 @@ import { waitForElementsToBeInDOM } from './wait_for_dom_elements';
 import { getTimeRange } from './get_time_range';
 import { getElementPositionAndAttributes } from './get_element_position_data';
 import { getScreenshots } from './get_screenshots';
+import { scanPage } from './scan_page';
 import { skipTelemetry } from './skip_telemetry';
 
 export function screenshotsObservableFactory(
@@ -39,108 +28,68 @@ export function screenshotsObservableFactory(
 
   return function screenshotsObservable({
     logger,
-    url,
+    urls,
     conditionalHeaders,
     layout,
     browserTimezone,
-  }: ScreenshotObservableOpts): Rx.Observable<ScreenshotResults> {
-    const create$ = browserDriverFactory.create({
-      viewport: layout.getBrowserViewport(),
-      browserTimezone,
-    });
+  }: ScreenshotObservableOpts): Rx.Observable<ScreenshotResults[]> {
+    const create$ = browserDriverFactory.createPage(
+      { viewport: layout.getBrowserViewport(), browserTimezone },
+      logger
+    );
 
-    // @ts-ignore this needs to be refactored to use less random type declaration and instead rely on structures that work with inference TODO
-    return create$.pipe(
-      mergeMap(({ driver$, exit$ }) => {
-        const screenshot$ = driver$.pipe(
-          mergeMap(
-            (browser: HeadlessBrowser) => openUrl(browser, url, conditionalHeaders, logger),
-            browser => browser
-          ),
-          mergeMap(
-            (browser: HeadlessBrowser) => skipTelemetry(browser, logger),
-            browser => browser
-          ),
-          mergeMap(
-            (browser: HeadlessBrowser) => {
-              logger.debug(
-                'waiting for elements or items count attribute; or not found to interrupt'
-              );
+    return Rx.from(urls).pipe(
+      mergeMap(url => {
+        return create$.pipe(
+          mergeMap(({ driver, exit$ }) => {
+            const screenshot$ = Rx.of(driver).pipe(
+              mergeMap(() => openUrl(driver, url, conditionalHeaders, logger)),
+              mergeMap(() => skipTelemetry(driver, logger)),
+              mergeMap(() => scanPage(driver, layout, logger)),
+              mergeMap(() => getNumberOfItems(driver, layout, logger)),
+              mergeMap(async itemsCount => {
+                const viewport = layout.getViewport(itemsCount);
+                await Promise.all([
+                  driver.setViewport(viewport, logger),
+                  waitForElementsToBeInDOM(driver, itemsCount, layout, logger),
+                ]);
+              }),
+              mergeMap(async () => {
+                // Waiting till _after_ elements have rendered before injecting our CSS
+                // allows for them to be displayed properly in many cases
+                await injectCustomCss(driver, layout, logger);
 
-              // the dashboard is using the `itemsCountAttribute` attribute to let us
-              // know how many items to expect since gridster incrementally adds panels
-              // we have to use this hint to wait for all of them
-              const renderSuccess = browser.waitForSelector(
-                `${layout.selectors.renderComplete},[${layout.selectors.itemsCountAttribute}]`,
-                {},
-                logger
-              );
-              const renderError = checkForToastMessage(browser, layout, logger);
-              return Rx.race(Rx.from(renderSuccess), Rx.from(renderError));
-            },
-            browser => browser
-          ),
-          mergeMap(
-            (browser: HeadlessBrowser) => getNumberOfItems(browser, layout, logger),
-            (browser, itemsCount: number) => ({ browser, itemsCount })
-          ),
-          mergeMap(
-            async ({ browser, itemsCount }) => {
-              logger.debug('setting viewport');
-              const viewport = layout.getViewport(itemsCount);
-              return await browser.setViewport(viewport, logger);
-            },
-            ({ browser, itemsCount }) => ({ browser, itemsCount })
-          ),
-          mergeMap(
-            ({ browser, itemsCount }) =>
-              waitForElementsToBeInDOM(browser, itemsCount, layout, logger),
-            ({ browser }) => browser
-          ),
-          mergeMap(
-            browser => {
-              // Waiting till _after_ elements have rendered before injecting our CSS
-              // allows for them to be displayed properly in many cases
-              return injectCustomCss(browser, layout, logger);
-            },
-            browser => browser
-          ),
-          mergeMap(
-            async browser => {
-              if (layout.positionElements) {
-                // position panel elements for print layout
-                return await layout.positionElements(browser, logger);
-              }
-            },
-            browser => browser
-          ),
-          mergeMap(
-            (browser: HeadlessBrowser) => {
-              return waitForRenderComplete(captureConfig, browser, layout, logger);
-            },
-            browser => browser
-          ),
-          mergeMap(
-            browser => getTimeRange(browser, layout, logger),
-            (browser, timeRange: TimeRange | null) => ({ browser, timeRange })
-          ),
-          mergeMap(
-            ({ browser }) => getElementPositionAndAttributes(browser, layout),
-            ({ browser, timeRange }, elementsPositionAndAttributes: ElementsPositionAndAttribute[]) => {
-              return { browser, timeRange, elementsPositionAndAttributes };
-            } // prettier-ignore
-          ),
-          mergeMap(
-            ({ browser, elementsPositionAndAttributes }) => {
-              return getScreenshots({ browser, elementsPositionAndAttributes, logger });
-            },
-            ({ timeRange }, screenshots) => ({ timeRange, screenshots })
-          )
-        );
+                if (layout.positionElements) {
+                  // position panel elements for print layout
+                  await layout.positionElements(driver, logger);
+                }
+
+                await waitForRenderComplete(captureConfig, driver, layout, logger);
+              }),
+              mergeMap(() => getTimeRange(driver, layout, logger)),
+              mergeMap(
+                async (timeRange): Promise<ScreenshotResults> => {
+                  const elementsPositionAndAttributes = await getElementPositionAndAttributes(
+                    driver,
+                    layout
+                  );
+                  const screenshots = await getScreenshots({
+                    browser: driver,
+                    elementsPositionAndAttributes,
+                    logger,
+                  });
+
+                  return { timeRange, screenshots };
+                }
+              )
+            );
 
-        return Rx.race(screenshot$, exit$);
+            return Rx.race(screenshot$, exit$);
+          })
+        );
       }),
-      first()
+      first(),
+      toArray()
     );
   };
 }
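Distilled, the new capture loop gives every url its own page and races the capture against the page's exit stream, so a dead browser interrupts the job instead of hanging it; a sketch with hypothetical stand-ins:

import * as Rx from 'rxjs';
import { mergeMap, toArray } from 'rxjs/operators';

interface PageHandle {
  driver: unknown;
  exit$: Rx.Observable<never>; // errors if the browser process dies
}

declare const create$: Rx.Observable<PageHandle>; // opens a fresh page
declare function captureUrl(driver: unknown, url: string): Rx.Observable<string>;

function captureAll(urls: string[]): Rx.Observable<string[]> {
  return Rx.from(urls).pipe(
    mergeMap(url =>
      create$.pipe(
        // whichever emits first wins: the finished capture or a browser exit
        mergeMap(({ driver, exit$ }) => Rx.race(captureUrl(driver, url), exit$))
      )
    ),
    toArray()
  );
}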
screenshots/scan_page.ts (new file)

@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import * as Rx from 'rxjs';
+import { HeadlessChromiumDriver } from '../../../../server/browsers/chromium/driver';
+import { LevelLogger } from '../../../../server/lib';
+import { LayoutInstance } from '../../layouts/layout';
+import { checkForToastMessage } from './check_for_toast';
+
+export function scanPage(
+  browser: HeadlessChromiumDriver,
+  layout: LayoutInstance,
+  logger: LevelLogger
+) {
+  logger.debug('waiting for elements or items count attribute; or not found to interrupt');
+
+  // the dashboard is using the `itemsCountAttribute` attribute to let us
+  // know how many items to expect since gridster incrementally adds panels
+  // we have to use this hint to wait for all of them
+  const renderSuccess = browser.waitForSelector(
+    `${layout.selectors.renderComplete},[${layout.selectors.itemsCountAttribute}]`,
+    {},
+    logger
+  );
+  const renderError = checkForToastMessage(browser, layout, logger);
+  return Rx.race(Rx.from(renderSuccess), Rx.from(renderError));
+}
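scanPage resolves with whichever comes first: the render-complete selector or an error toast. The Rx.race behavior in isolation:

import * as Rx from 'rxjs';
import { map } from 'rxjs/operators';

const renderSuccess$ = Rx.timer(100).pipe(map(() => 'rendered')); // stand-in: selector found
const renderError$ = Rx.timer(500).pipe(map(() => 'toast error')); // stand-in: toast appeared

// Emits 'rendered': the first observable to emit wins; the loser is unsubscribed.
Rx.race(renderSuccess$, renderError$).subscribe(result => console.log(result));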
screenshots/types.ts

@@ -10,7 +10,7 @@ import { LayoutInstance } from '../../layouts/layout';
 
 export interface ScreenshotObservableOpts {
   logger: LevelLogger;
-  url: string;
+  urls: string[];
   conditionalHeaders: ConditionalHeaders;
   layout: LayoutInstance;
   browserTimezone: string;
@@ -36,6 +36,6 @@ export interface Screenshot {
 }
 
 export interface ScreenshotResults {
-  timeRange: TimeRange;
+  timeRange: TimeRange | null;
   screenshots: Screenshot[];
 }
@@ -27,7 +27,7 @@ beforeEach(() => {
     'server.port': 5601,
   };
   mockServer = {
-    expose: () => {},
+    expose: () => {}, // NOTE: this is for oncePerServer
     config: memoize(() => ({ get: jest.fn() })),
     info: {
       protocol: 'http',
png/server/execute_job/index.ts

@@ -5,7 +5,7 @@
 */
 
 import * as Rx from 'rxjs';
-import { mergeMap, catchError, map, takeUntil } from 'rxjs/operators';
+import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
 import { PLUGIN_ID, PNG_JOB_TYPE } from '../../../../common/constants';
 import {
   ServerFacade,
@@ -32,18 +32,14 @@ export const executeJobFactory: QueuedPngExecutorFactory = function executeJobFa
   const generatePngObservable = generatePngObservableFactory(server, browserDriverFactory);
   const logger = LevelLogger.createForServer(server, [PLUGIN_ID, PNG_JOB_TYPE, 'execute']);
 
-  return function executeJob(
-    jobId: string,
-    jobToExecute: JobDocPayloadPNG,
-    cancellationToken: any
-  ) {
+  return function executeJob(jobId: string, job: JobDocPayloadPNG, cancellationToken: any) {
     const jobLogger = logger.clone([jobId]);
-    const process$ = Rx.of({ job: jobToExecute, server, logger }).pipe(
-      mergeMap(decryptJobHeaders),
-      map(omitBlacklistedHeaders),
-      map(getConditionalHeaders),
-      mergeMap(getFullUrls),
-      mergeMap(({ job, conditionalHeaders, urls }) => {
+    const process$ = Rx.of(1).pipe(
+      mergeMap(() => decryptJobHeaders({ server, job, logger })),
+      map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })),
+      map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })),
+      mergeMap(conditionalHeaders => {
+        const urls = getFullUrls({ server, job });
+        const hashUrl = urls[0];
         return generatePngObservable(
           jobLogger,
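The operator choice tracks each helper's new return type: mergeMap flattens the promise-returning steps, while plain map is enough for the now-synchronous ones. In isolation:

import * as Rx from 'rxjs';
import { map, mergeMap } from 'rxjs/operators';

declare function decryptStep(): Promise<string>; // async helper (assumed)
declare function filterStep(headers: string): string; // sync helper (assumed)

const out$: Rx.Observable<string> = Rx.of(1).pipe(
  mergeMap(() => decryptStep()), // the Promise is flattened to its resolved value
  map(headers => filterStep(headers)) // synchronous transform, no flattening needed
);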
png/server/lib/generate_png.ts

@@ -32,19 +32,17 @@ export function generatePngObservableFactory(
     const layout = new PreserveLayout(layoutParams.dimensions);
     const screenshots$ = screenshotsObservable({
       logger,
-      url,
+      urls: [url],
       conditionalHeaders,
       layout,
       browserTimezone,
     }).pipe(
-      map(urlScreenshots => {
-        if (urlScreenshots.screenshots.length !== 1) {
-          throw new Error(
-            `Expected there to be 1 screenshot, but there are ${urlScreenshots.screenshots.length}`
-          );
+      map(([{ screenshots }]) => {
+        if (screenshots.length !== 1) {
+          throw new Error(`Expected there to be 1 screenshot, but there are ${screenshots.length}`);
         }
 
-        return urlScreenshots.screenshots[0].base64EncodedData;
+        return screenshots[0].base64EncodedData;
       })
     );
 
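PNG remains single-url: the one url goes in as a one-element array, and the first element of the emitted ScreenshotResults[] comes back out. A sketch of the unwrap (assumed shapes):

import * as Rx from 'rxjs';
import { map } from 'rxjs/operators';

interface Screenshot { base64EncodedData: string; }
interface ScreenshotResults { screenshots: Screenshot[]; }

declare const results$: Rx.Observable<ScreenshotResults[]>;

const png$ = results$.pipe(
  // destructure the single per-url result, then take its single screenshot
  map(([{ screenshots }]) => screenshots[0].base64EncodedData)
);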
printable_pdf/server/execute_job/index.ts

@@ -5,7 +5,7 @@
 */
 
 import * as Rx from 'rxjs';
-import { mergeMap, catchError, map, takeUntil } from 'rxjs/operators';
+import { catchError, map, mergeMap, takeUntil } from 'rxjs/operators';
 import {
   ServerFacade,
   ExecuteJobFactory,
@@ -33,33 +33,28 @@ export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFa
   const generatePdfObservable = generatePdfObservableFactory(server, browserDriverFactory);
   const logger = LevelLogger.createForServer(server, [PLUGIN_ID, PDF_JOB_TYPE, 'execute']);
 
-  return function executeJob(
-    jobId: string,
-    jobToExecute: JobDocPayloadPDF,
-    cancellationToken: any
-  ) {
+  return function executeJob(jobId: string, job: JobDocPayloadPDF, cancellationToken: any) {
     const jobLogger = logger.clone([jobId]);
 
-    const process$ = Rx.of({ job: jobToExecute, server, logger }).pipe(
-      mergeMap(decryptJobHeaders),
-      map(omitBlacklistedHeaders),
-      map(getConditionalHeaders),
-      mergeMap(getCustomLogo),
-      mergeMap(getFullUrls),
-      mergeMap(
-        ({ job, conditionalHeaders, logo, urls }): Rx.Observable<Buffer> => {
-          const { browserTimezone, layout } = jobToExecute;
-          return generatePdfObservable(
-            jobLogger,
-            job.title,
-            urls,
-            browserTimezone,
-            conditionalHeaders,
-            layout,
-            logo
-          );
-        }
-      ),
+    const process$ = Rx.of(1).pipe(
+      mergeMap(() => decryptJobHeaders({ server, job, logger })),
+      map(decryptedHeaders => omitBlacklistedHeaders({ job, decryptedHeaders })),
+      map(filteredHeaders => getConditionalHeaders({ server, job, filteredHeaders })),
+      mergeMap(conditionalHeaders => getCustomLogo({ server, job, conditionalHeaders })),
+      mergeMap(({ logo, conditionalHeaders }) => {
+        const urls = getFullUrls({ server, job });
+
+        const { browserTimezone, layout, title } = job;
+        return generatePdfObservable(
+          jobLogger,
+          title,
+          urls,
+          browserTimezone,
+          conditionalHeaders,
+          layout,
+          logo
+        );
+      }),
       map((buffer: Buffer) => ({
         content_type: 'application/pdf',
         content: buffer.toString('base64'),
@@ -72,7 +67,6 @@ export const executeJobFactory: QueuedPdfExecutorFactory = function executeJobFa
     );
 
     const stop$ = Rx.fromEventPattern(cancellationToken.on);
-
     return process$.pipe(takeUntil(stop$)).toPromise();
   };
 };
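Both executors keep the same cancellation shape: an event-backed stop$ stream tears the pipeline down through takeUntil. In isolation:

import * as Rx from 'rxjs';
import { takeUntil } from 'rxjs/operators';

declare const cancellationToken: { on: (handler: () => void) => void }; // assumed shape
declare const process$: Rx.Observable<Buffer>;

const stop$ = Rx.fromEventPattern(cancellationToken.on);
// Resolves with the job result, or completes early if cancellation fires first.
const jobDone = process$.pipe(takeUntil(stop$)).toPromise();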
printable_pdf/server/lib/generate_pdf.ts

@@ -5,7 +5,7 @@
 */
 
 import * as Rx from 'rxjs';
-import { toArray, mergeMap } from 'rxjs/operators';
+import { mergeMap } from 'rxjs/operators';
 import { groupBy } from 'lodash';
 import { LevelLogger } from '../../../../server/lib';
 import { ServerFacade, HeadlessChromiumDriverFactory, ConditionalHeaders } from '../../../../types';
@@ -31,7 +31,6 @@ export function generatePdfObservableFactory(
   browserDriverFactory: HeadlessChromiumDriverFactory
 ) {
   const screenshotsObservable = screenshotsObservableFactory(server, browserDriverFactory);
-  const captureConcurrency = 1;
 
   return function generatePdfObservable(
     logger: LevelLogger,
@@ -41,15 +40,16 @@ export function generatePdfObservableFactory(
     conditionalHeaders: ConditionalHeaders,
     layoutParams: LayoutParams,
     logo?: string
-  ) {
+  ): Rx.Observable<Buffer> {
     const layout = createLayout(server, layoutParams) as LayoutInstance;
-    const screenshots$ = Rx.from(urls).pipe(
-      mergeMap(
-        url => screenshotsObservable({ logger, url, conditionalHeaders, layout, browserTimezone }),
-        captureConcurrency
-      ),
-      toArray(),
-      mergeMap(async (urlScreenshots: ScreenshotResults[]) => {
+    const screenshots$ = screenshotsObservable({
+      logger,
+      urls,
+      conditionalHeaders,
+      layout,
+      browserTimezone,
+    }).pipe(
+      mergeMap(async urlScreenshots => {
         const pdfOutput = pdf.create(layout, logo);
 
         if (title) {
@@ -68,8 +68,7 @@ export function generatePdfObservableFactory(
       });
 
       pdfOutput.generate();
-      const buffer = await pdfOutput.getBuffer();
-      return buffer;
+      return await pdfOutput.getBuffer();
     })
   );
 
chromium/driver_factory/index.ts

@@ -31,10 +31,11 @@ type queueTimeout = number;
 
 export class HeadlessChromiumDriverFactory {
   private binaryPath: binaryPath;
-  private logger: Logger;
   private browserConfig: BrowserConfig;
   private queueTimeout: queueTimeout;
   private networkPolicy: NetworkPolicy;
+  private userDataDir: string;
+  private getChromiumArgs: (viewport: BrowserConfig['viewport']) => string[];
 
   constructor(
     binaryPath: binaryPath,
@@ -46,23 +47,30 @@ export class HeadlessChromiumDriverFactory {
     this.binaryPath = binaryPath;
     this.browserConfig = browserConfig;
     this.queueTimeout = queueTimeout;
-    this.logger = logger;
     this.networkPolicy = networkPolicy;
+
+    this.userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-'));
+    this.getChromiumArgs = (viewport: BrowserConfig['viewport']) =>
+      args({
+        userDataDir: this.userDataDir,
+        viewport,
+        disableSandbox: this.browserConfig.disableSandbox,
+        proxy: this.browserConfig.proxy,
+      });
   }
 
   type = 'chromium';
 
-  test({ viewport }: { viewport: BrowserConfig['viewport'] }, logger: Logger) {
-    const userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-'));
+  test(logger: Logger) {
     const chromiumArgs = args({
-      userDataDir,
-      viewport,
+      userDataDir: this.userDataDir,
+      viewport: { width: 800, height: 600 },
      disableSandbox: this.browserConfig.disableSandbox,
       proxy: this.browserConfig.proxy,
     });
 
     return puppeteerLaunch({
-      userDataDir,
+      userDataDir: this.userDataDir,
       executablePath: this.binaryPath,
       ignoreHTTPSErrors: true,
       args: chromiumArgs,
@@ -76,33 +84,25 @@ export class HeadlessChromiumDriverFactory {
     });
   }
 
-  create({
-    viewport,
-    browserTimezone,
-  }: {
-    viewport: BrowserConfig['viewport'];
-    browserTimezone: string;
-  }): Rx.Observable<{
-    driver$: Rx.Observable<HeadlessChromiumDriver>;
-    exit$: Rx.Observable<never>;
-  }> {
+  /*
+   * Return an observable to objects which will drive screenshot capture for a page
+   */
+  createPage(
+    { viewport, browserTimezone }: { viewport: BrowserConfig['viewport']; browserTimezone: string },
+    pLogger: Logger
+  ): Rx.Observable<{ driver: HeadlessChromiumDriver; exit$: Rx.Observable<never> }> {
     return Rx.Observable.create(async (observer: InnerSubscriber<any, any>) => {
-      this.logger.debug(`Creating browser driver factory`);
+      const logger = pLogger.clone(['browser-driver']);
+      logger.info(`Creating browser page driver`);
 
-      const userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-'));
-      const chromiumArgs = args({
-        userDataDir,
-        viewport,
-        disableSandbox: this.browserConfig.disableSandbox,
-        proxy: this.browserConfig.proxy,
-      });
+      const chromiumArgs = this.getChromiumArgs(viewport);
 
       let browser: Browser;
       let page: Page;
       try {
         browser = await puppeteerLaunch({
           pipe: !this.browserConfig.inspect,
-          userDataDir,
+          userDataDir: this.userDataDir,
           executablePath: this.binaryPath,
           ignoreHTTPSErrors: true,
           args: chromiumArgs,
@@ -119,7 +119,7 @@ export class HeadlessChromiumDriverFactory {
       // "TimeoutError: waiting for selector ".application" failed: timeout 30000ms exceeded"
       page.setDefaultTimeout(this.queueTimeout);
 
-      this.logger.debug(`Browser driver factory created`);
+      logger.debug(`Browser page driver created`);
     } catch (err) {
       observer.error(new Error(`Error spawning Chromium browser: [${err}]`));
       throw err;
@@ -130,12 +130,12 @@ export class HeadlessChromiumDriverFactory {
           await browser.close();
         },
       };
-      const { terminate$ } = safeChildProcess(this.logger, childProcess);
+      const { terminate$ } = safeChildProcess(logger, childProcess);
 
       // this is adding unsubscribe logic to our observer
       // so that if our observer unsubscribes, we terminate our child-process
       observer.add(() => {
-        this.logger.debug(`The browser process observer has unsubscribed. Closing the browser...`);
+        logger.debug(`The browser process observer has unsubscribed. Closing the browser...`);
         childProcess.kill(); // ignore async
       });
 
@@ -144,7 +144,7 @@ export class HeadlessChromiumDriverFactory {
       terminate$
         .pipe(
           tap(signal => {
-            this.logger.debug(`Termination signal received: ${signal}`);
+            logger.debug(`Termination signal received: ${signal}`);
           }),
           ignoreElements()
         )
@@ -152,33 +152,40 @@ export class HeadlessChromiumDriverFactory {
       );
 
      // taps the browser log streams and combine them to Kibana logs
-      this.getBrowserLogger(page).subscribe();
-      this.getProcessLogger(browser).subscribe();
+      this.getBrowserLogger(page, logger).subscribe();
+      this.getProcessLogger(browser, logger).subscribe();
 
-      const driver$ = Rx.of(new HeadlessChromiumDriver(page, { inspect: this.browserConfig.inspect, networkPolicy: this.networkPolicy })); // prettier-ignore
+      // HeadlessChromiumDriver: object to "drive" a browser page
+      const driver = new HeadlessChromiumDriver(page, {
+        inspect: this.browserConfig.inspect,
+        networkPolicy: this.networkPolicy,
+      });
 
+      // Rx.Observable<never>: stream to interrupt page capture
       const exit$ = this.getPageExit(browser, page);
 
-      observer.next({ driver$, exit$ });
+      observer.next({ driver, exit$ });
 
+      // unsubscribe logic makes a best-effort attempt to delete the user data directory used by chromium
       observer.add(() => {
-        this.logger.debug(`deleting chromium user data directory at [${userDataDir}]`);
+        const userDataDir = this.userDataDir;
+        logger.debug(`deleting chromium user data directory at [${userDataDir}]`);
         // the unsubscribe function isn't `async` so we're going to make our best effort at
         // deleting the userDataDir and if it fails log an error.
         del(userDataDir).catch(error => {
-          this.logger.error(`error deleting user data directory at [${userDataDir}]: [${error}]`);
+          logger.error(`error deleting user data directory at [${userDataDir}]: [${error}]`);
         });
       });
     });
   }
 
-  getBrowserLogger(page: Page): Rx.Observable<void> {
+  getBrowserLogger(page: Page, logger: Logger): Rx.Observable<void> {
     const consoleMessages$ = Rx.fromEvent<ConsoleMessage>(page, 'console').pipe(
      map(line => {
         if (line.type() === 'error') {
-          this.logger.error(line.text(), ['headless-browser-console']);
+          logger.error(line.text(), ['headless-browser-console']);
         } else {
-          this.logger.debug(line.text(), [`headless-browser-console:${line.type()}`]);
+          logger.debug(line.text(), [`headless-browser-console:${line.type()}`]);
         }
       })
     );
@@ -187,7 +194,7 @@ export class HeadlessChromiumDriverFactory {
       map(req => {
         const failure = req.failure && req.failure();
         if (failure) {
-          this.logger.warning(
+          logger.warning(
             `Request to [${req.url()}] failed! [${failure.errorText}]. This error will be ignored.`
           );
         }
@@ -197,7 +204,7 @@ export class HeadlessChromiumDriverFactory {
     return Rx.merge(consoleMessages$, pageRequestFailed$);
   }
 
-  getProcessLogger(browser: Browser) {
+  getProcessLogger(browser: Browser, logger: Logger): Rx.Observable<void> {
     const childProcess = browser.process();
     // NOTE: The browser driver can not observe stdout and stderr of the child process
     // Puppeteer doesn't give a handle to the original ChildProcess object
@@ -206,7 +213,7 @@ export class HeadlessChromiumDriverFactory {
     // just log closing of the process
     const processClose$ = Rx.fromEvent<void>(childProcess, 'close').pipe(
       tap(() => {
-        this.logger.debug('child process closed', ['headless-browser-process']);
+        logger.debug('child process closed', ['headless-browser-process']);
       })
     );
 
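The factory now creates one Chromium profile directory in its constructor and shares it between test() and createPage(), deleting it best-effort when the page observable unsubscribes. The lifecycle, reduced to its parts (del is the package the file already uses):

import fs from 'fs';
import os from 'os';
import path from 'path';
import del from 'del';

// Created once, up front, instead of per call:
const userDataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'chromium-'));

// ...every puppeteerLaunch({ userDataDir, ... }) reuses the same directory...

// On unsubscribe: cleanup cannot be awaited, so failures are only logged.
del(userDataDir).catch(error => {
  console.error(`error deleting user data directory at [${userDataDir}]: [${error}]`);
});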
server/browsers/index.ts

@@ -3,6 +3,7 @@
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
+
 import * as chromiumDefinition from './chromium';
 
 export { ensureAllBrowsersDownloaded } from './download';
server/browsers/validate_browser.ts

@@ -18,14 +18,12 @@ export const validateBrowser = async (
   logger: Logger
 ) => {
   if (browserFactory.type === BROWSER_TYPE) {
-    return browserFactory
-      .test({ viewport: { width: 800, height: 600 } }, logger)
-      .then((browser: Browser | null) => {
-        if (browser && browser.close) {
-          browser.close();
-        } else {
-          throw new Error('Could not close browser client handle!');
-        }
-      });
+    return browserFactory.test(logger).then((browser: Browser | null) => {
+      if (browser && browser.close) {
+        browser.close();
+      } else {
+        throw new Error('Could not close browser client handle!');
+      }
+    });
   }
 };
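With the viewport handled inside the factory, the browser smoke-test takes only a logger; a sketch of the simplified call (browserFactory and logger assumed in scope):

const browser = await browserFactory.test(logger);
if (browser && browser.close) {
  browser.close();
} else {
  throw new Error('Could not close browser client handle!');
}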
x-pack functional test config

@@ -28,6 +28,7 @@ export default async function({ readConfigFile }) {
       '["info","warning","error","fatal","optimize","reporting"]',
       '--xpack.endpoint.enabled=true',
       '--xpack.reporting.csv.enablePanelActionDownload=true',
+      '--xpack.reporting.capture.maxAttempts=1',
       '--xpack.security.session.idleTimeout=3600000',
       '--xpack.spaces.enabled=false',
     ],