Mirror of https://github.com/elastic/kibana.git (synced 2025-04-24 01:38:56 -04:00)
[Reporting] Refactoring of ExportTypes (#162276)
## Summary

This PR refactors the `getExportType()` implementation in the reporting plugin. With these changes, export types are now classes that are registered into the `export_types_registry` in reporting core.

Export Type Classes:

- CsvSearchSourceExportType
- CsvSearchSourceImmediateExportType (not registered, but can be called by `getCsvSearchSourceImmediate()` in core.ts)
- CsvV2ExportType
- PngV1ExportType (deprecated)
- PngExportType
- PdfV1ExportType (deprecated)
- PdfExportType

### Checklist

- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios

---------

Co-authored-by: Timothy Sullivan <tsullivan@elastic.co>
This commit is contained in:
parent e3cc4a9a22, commit 71ebc38c42
65 changed files with 1563 additions and 1754 deletions
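
For orientation, here is a minimal TypeScript sketch of the registration pattern the summary describes. It is illustrative only: the real classes take Kibana's `CoreSetup`, config, logger, and plugin initializer context, and the real `ExportTypesRegistry` is imported from `server/lib`, as the `ReportingCore` changes in the diff below show.

```ts
// Illustrative sketch only — not the actual Kibana implementation.
interface ExportTypeLike {
  id: string;      // key used by exportTypesRegistry.getById()
  name: string;    // user-facing name
  jobType: string; // job type written into report documents
}

class ExportTypesRegistry {
  private types = new Map<string, ExportTypeLike>();

  register(type: ExportTypeLike) {
    if (this.types.has(type.id)) {
      throw new Error(`Export type "${type.id}" is already registered`);
    }
    this.types.set(type.id, type);
  }

  getById(id: string): ExportTypeLike {
    const type = this.types.get(id);
    if (!type) throw new Error(`Unknown export type "${id}"`);
    return type;
  }
}

// Per the diff below, ReportingCore instantiates each export type class once,
// collects the instances, and registers them all:
const registry = new ExportTypesRegistry();
const exportTypes: ExportTypeLike[] = [
  // stand-in for CsvSearchSourceExportType, PdfExportType, PngExportType, etc.
  { id: 'csv_searchsource', name: 'CSV', jobType: 'csv_searchsource' },
];
exportTypes.forEach((et) => registry.register(et));
```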

@@ -5,15 +5,13 @@
  * 2.0.
  */
 
-import { ReportingStart } from '@kbn/reporting-plugin/server/types';
 import { getCanvasFeature } from './feature';
+import { ReportingStart } from '@kbn/reporting-plugin/server/types';
+import { reportingMock } from '@kbn/reporting-plugin/server/mocks';
 
 let mockReportingPlugin: ReportingStart;
 beforeEach(() => {
-  mockReportingPlugin = {
-    usesUiCapabilities: () => false,
-    registerExportTypes: () => {},
-  };
+  mockReportingPlugin = reportingMock.createStart();
 });
 
 it('Provides a feature declaration ', () => {
@@ -86,10 +84,7 @@ it('Provides a feature declaration ', () => {
 });
 
 it(`Calls on Reporting whether to include Generate PDF as a sub-feature`, () => {
-  mockReportingPlugin = {
-    usesUiCapabilities: () => true,
-    registerExportTypes: () => {},
-  };
+  mockReportingPlugin.usesUiCapabilities = () => true;
   expect(getCanvasFeature({ reporting: mockReportingPlugin })).toMatchInlineSnapshot(`
     Object {
       "app": Array [
@@ -55,12 +55,12 @@ export const USES_HEADLESS_JOB_TYPES = [
 export const DEPRECATED_JOB_TYPES = [CSV_JOB_TYPE_DEPRECATED];
 
 // Licenses
-export const LICENSE_TYPE_TRIAL = 'trial';
-export const LICENSE_TYPE_BASIC = 'basic';
-export const LICENSE_TYPE_CLOUD_STANDARD = 'standard';
-export const LICENSE_TYPE_GOLD = 'gold';
-export const LICENSE_TYPE_PLATINUM = 'platinum';
-export const LICENSE_TYPE_ENTERPRISE = 'enterprise';
+export const LICENSE_TYPE_TRIAL = 'trial' as const;
+export const LICENSE_TYPE_BASIC = 'basic' as const;
+export const LICENSE_TYPE_CLOUD_STANDARD = 'standard' as const;
+export const LICENSE_TYPE_GOLD = 'gold' as const;
+export const LICENSE_TYPE_PLATINUM = 'platinum' as const;
+export const LICENSE_TYPE_ENTERPRISE = 'enterprise' as const;
 
 // Routes
 export const API_BASE_URL = '/api/reporting'; // "Generation URL" from share menu
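
A note on the `as const` change above: a plain `const X = 'gold'` already gets the literal type `"gold"`, but it is a widening literal type and collapses to `string` in mutable positions such as array or class-property initializers. Adding `as const` keeps the literal, which presumably is what lets these constants be collected into the `validLicenses: LicenseType[]` arrays of the new export type classes without casts. A small self-contained illustration (not Kibana code):

```ts
type LicenseType = 'trial' | 'basic' | 'standard' | 'gold' | 'platinum' | 'enterprise';

const GOLD_WIDENING = 'gold';        // widening literal type: collapses to string in mutable positions
const GOLD_NARROW = 'gold' as const; // non-widening literal type: stays "gold"

let wide = [GOLD_WIDENING];          // inferred as string[]
let narrow = [GOLD_NARROW];          // inferred as "gold"[]

const ok: LicenseType[] = narrow;    // OK: "gold"[] is assignable to LicenseType[]
// const bad: LicenseType[] = wide;  // error: string[] is not assignable to LicenseType[]
```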
@ -8,20 +8,16 @@
|
|||
import type {
|
||||
CoreSetup,
|
||||
DocLinksServiceSetup,
|
||||
FakeRawRequest,
|
||||
Headers,
|
||||
IBasePath,
|
||||
IClusterClient,
|
||||
KibanaRequest,
|
||||
Logger,
|
||||
PackageInfo,
|
||||
PluginInitializerContext,
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsServiceStart,
|
||||
StatusServiceSetup,
|
||||
UiSettingsServiceStart,
|
||||
} from '@kbn/core/server';
|
||||
import { CoreKibanaRequest } from '@kbn/core/server';
|
||||
import type { PluginStart as DataPluginStart } from '@kbn/data-plugin/server';
|
||||
import type { DiscoverServerPluginStart } from '@kbn/discover-plugin/server';
|
||||
import type { PluginSetupContract as FeaturesPluginSetup } from '@kbn/features-plugin/server';
|
||||
|
@ -34,7 +30,7 @@ import {
|
|||
ScreenshottingStart,
|
||||
} from '@kbn/screenshotting-plugin/server';
|
||||
import type { SecurityPluginSetup, SecurityPluginStart } from '@kbn/security-plugin/server';
|
||||
import { DEFAULT_SPACE_ID } from '@kbn/spaces-plugin/common/constants';
|
||||
import { DEFAULT_SPACE_ID } from '@kbn/spaces-plugin/common';
|
||||
import type { SpacesPluginSetup } from '@kbn/spaces-plugin/server';
|
||||
import type {
|
||||
TaskManagerSetupContract,
|
||||
|
@ -46,11 +42,19 @@ import { map, switchMap, take } from 'rxjs/operators';
|
|||
import type { ReportingSetup } from '.';
|
||||
import { REPORTING_REDIRECT_LOCATOR_STORE_KEY } from '../common/constants';
|
||||
import { createConfig, ReportingConfigType } from './config';
|
||||
import { checkLicense, getExportTypesRegistry } from './lib';
|
||||
import { CsvSearchSourceExportType } from './export_types/csv_searchsource';
|
||||
import { CsvV2ExportType } from './export_types/csv_v2';
|
||||
import { PdfV1ExportType } from './export_types/printable_pdf';
|
||||
import { PdfExportType } from './export_types/printable_pdf_v2';
|
||||
import { PngV1ExportType } from './export_types/png';
|
||||
import { PngExportType } from './export_types/png_v2';
|
||||
import { checkLicense, ExportTypesRegistry } from './lib';
|
||||
import { reportingEventLoggerFactory } from './lib/event_logger/logger';
|
||||
import type { IReport, ReportingStore } from './lib/store';
|
||||
import { ExecuteReportTask, MonitorReportsTask, ReportTaskParams } from './lib/tasks';
|
||||
import type { PdfScreenshotOptions, PngScreenshotOptions, ReportingPluginRouter } from './types';
|
||||
import { CsvSearchSourceImmediateExportType } from './export_types/csv_searchsource_immediate';
|
||||
import { ExportType } from './export_types/common';
|
||||
|
||||
export interface ReportingInternalSetup {
|
||||
basePath: Pick<IBasePath, 'set'>;
|
||||
|
@ -102,11 +106,12 @@ export class ReportingCore {
|
|||
private readonly pluginSetup$ = new Rx.ReplaySubject<boolean>(); // observe async background setupDeps each are done
|
||||
private readonly pluginStart$ = new Rx.ReplaySubject<ReportingInternalStart>(); // observe async background startDeps
|
||||
private deprecatedAllowedRoles: string[] | false = false; // DEPRECATED. If `false`, the deprecated features have been disabled
|
||||
private exportTypesRegistry = getExportTypesRegistry();
|
||||
private executeTask: ExecuteReportTask;
|
||||
private monitorTask: MonitorReportsTask;
|
||||
private config: ReportingConfigType;
|
||||
private executing: Set<string>;
|
||||
private exportTypes: ExportType[] = [];
|
||||
private exportTypesRegistry = new ExportTypesRegistry();
|
||||
|
||||
public getContract: () => ReportingSetup;
|
||||
|
||||
|
@ -121,6 +126,21 @@ export class ReportingCore {
|
|||
const config = createConfig(core, context.config.get<ReportingConfigType>(), logger);
|
||||
this.config = config;
|
||||
|
||||
// Export Type declarations
|
||||
this.exportTypes.push(
|
||||
new CsvSearchSourceExportType(this.core, this.config, this.logger, this.context)
|
||||
);
|
||||
this.exportTypes.push(new CsvV2ExportType(this.core, this.config, this.logger, this.context));
|
||||
this.exportTypes.push(new PdfExportType(this.core, this.config, this.logger, this.context));
|
||||
this.exportTypes.push(new PngExportType(this.core, this.config, this.logger, this.context));
|
||||
// deprecated export types for tests
|
||||
this.exportTypes.push(new PdfV1ExportType(this.core, this.config, this.logger, this.context));
|
||||
this.exportTypes.push(new PngV1ExportType(this.core, this.config, this.logger, this.context));
|
||||
|
||||
this.exportTypes.forEach((et) => {
|
||||
this.exportTypesRegistry.register(et);
|
||||
});
|
||||
|
||||
this.deprecatedAllowedRoles = config.roles.enabled ? config.roles.allow : false;
|
||||
this.executeTask = new ExecuteReportTask(this, config, this.logger);
|
||||
this.monitorTask = new MonitorReportsTask(this, config, this.logger);
|
||||
|
@ -128,6 +148,8 @@ export class ReportingCore {
|
|||
this.getContract = () => ({
|
||||
usesUiCapabilities: () => config.roles.enabled === false,
|
||||
registerExportTypes: (id) => id,
|
||||
getScreenshots: this.getScreenshots.bind(this),
|
||||
getSpaceId: this.getSpaceId.bind(this),
|
||||
});
|
||||
|
||||
this.executing = new Set();
|
||||
|
@ -144,6 +166,10 @@ export class ReportingCore {
|
|||
this.pluginSetup$.next(true); // trigger the observer
|
||||
this.pluginSetupDeps = setupDeps; // cache
|
||||
|
||||
this.exportTypes.forEach((et) => {
|
||||
et.setup(setupDeps);
|
||||
});
|
||||
|
||||
const { executeTask, monitorTask } = this;
|
||||
setupDeps.taskManager.registerTaskDefinitions({
|
||||
[executeTask.TYPE]: executeTask.getTaskDefinition(),
|
||||
|
@ -158,6 +184,10 @@ export class ReportingCore {
|
|||
this.pluginStart$.next(startDeps); // trigger the observer
|
||||
this.pluginStartDeps = startDeps; // cache
|
||||
|
||||
this.exportTypes.forEach((et) => {
|
||||
et.start({ ...startDeps, reporting: this.getContract() });
|
||||
});
|
||||
|
||||
const { taskManager } = startDeps;
|
||||
const { executeTask, monitorTask } = this;
|
||||
// enable this instance to generate reports and to monitor for pending reports
|
||||
|
@ -322,63 +352,6 @@ export class ReportingCore {
|
|||
return this.pluginSetupDeps;
|
||||
}
|
||||
|
||||
private async getSavedObjectsClient(request: KibanaRequest) {
|
||||
const { savedObjects } = await this.getPluginStartDeps();
|
||||
return savedObjects.getScopedClient(request) as SavedObjectsClientContract;
|
||||
}
|
||||
|
||||
public async getUiSettingsServiceFactory(savedObjectsClient: SavedObjectsClientContract) {
|
||||
const { uiSettings: uiSettingsService } = await this.getPluginStartDeps();
|
||||
const scopedUiSettingsService = uiSettingsService.asScopedToClient(savedObjectsClient);
|
||||
return scopedUiSettingsService;
|
||||
}
|
||||
|
||||
public getSpaceId(request: KibanaRequest, logger = this.logger): string | undefined {
|
||||
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
|
||||
if (spacesService) {
|
||||
const spaceId = spacesService?.getSpaceId(request);
|
||||
|
||||
if (spaceId !== DEFAULT_SPACE_ID) {
|
||||
logger.info(`Request uses Space ID: ${spaceId}`);
|
||||
return spaceId;
|
||||
} else {
|
||||
logger.debug(`Request uses default Space`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public getFakeRequest(
|
||||
headers: Headers,
|
||||
spaceId: string | undefined,
|
||||
logger = this.logger
|
||||
): KibanaRequest {
|
||||
const rawRequest: FakeRawRequest = {
|
||||
headers,
|
||||
path: '/',
|
||||
};
|
||||
const fakeRequest = CoreKibanaRequest.from(rawRequest);
|
||||
|
||||
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
|
||||
if (spacesService) {
|
||||
if (spaceId && spaceId !== DEFAULT_SPACE_ID) {
|
||||
logger.info(`Generating request for space: ${spaceId}`);
|
||||
this.getPluginSetupDeps().basePath.set(fakeRequest, `/s/${spaceId}`);
|
||||
}
|
||||
}
|
||||
|
||||
return fakeRequest;
|
||||
}
|
||||
|
||||
public async getUiSettingsClient(request: KibanaRequest, logger = this.logger) {
|
||||
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
|
||||
const spaceId = this.getSpaceId(request, logger);
|
||||
if (spacesService && spaceId) {
|
||||
logger.info(`Creating UI Settings Client for space: ${spaceId}`);
|
||||
}
|
||||
const savedObjectsClient = await this.getSavedObjectsClient(request);
|
||||
return await this.getUiSettingsServiceFactory(savedObjectsClient);
|
||||
}
|
||||
|
||||
public async getDataViewsService(request: KibanaRequest) {
|
||||
const { savedObjects } = await this.getPluginStartDeps();
|
||||
const savedObjectsClient = savedObjects.getScopedClient(request);
|
||||
|
@ -399,6 +372,20 @@ export class ReportingCore {
|
|||
return startDeps.esClient;
|
||||
}
|
||||
|
||||
public getSpaceId(request: KibanaRequest, logger = this.logger): string | undefined {
|
||||
const spacesService = this.getPluginSetupDeps().spaces?.spacesService;
|
||||
if (spacesService) {
|
||||
const spaceId = spacesService?.getSpaceId(request);
|
||||
|
||||
if (spaceId !== DEFAULT_SPACE_ID) {
|
||||
logger.info(`Request uses Space ID: ${spaceId}`);
|
||||
return spaceId;
|
||||
} else {
|
||||
logger.debug(`Request uses default Space`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public getScreenshots(options: PdfScreenshotOptions): Rx.Observable<PdfScreenshotResult>;
|
||||
public getScreenshots(options: PngScreenshotOptions): Rx.Observable<PngScreenshotResult>;
|
||||
public getScreenshots(
|
||||
|
@ -434,4 +421,18 @@ export class ReportingCore {
|
|||
const ReportingEventLogger = reportingEventLoggerFactory(this.logger);
|
||||
return new ReportingEventLogger(report, task);
|
||||
}
|
||||
|
||||
public async getCsvSearchSourceImmediate() {
|
||||
const startDeps = await this.getPluginStartDeps();
|
||||
|
||||
const csvImmediateExport = new CsvSearchSourceImmediateExportType(
|
||||
this.core,
|
||||
this.config,
|
||||
this.logger,
|
||||
this.context
|
||||
);
|
||||
csvImmediateExport.setup(this.getPluginSetupDeps());
|
||||
csvImmediateExport.start({ ...startDeps, reporting: this.getContract() });
|
||||
return csvImmediateExport;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -0,0 +1,140 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
IBasePath,
|
||||
Headers,
|
||||
Logger,
|
||||
CoreKibanaRequest,
|
||||
CoreSetup,
|
||||
FakeRawRequest,
|
||||
HttpServiceSetup,
|
||||
KibanaRequest,
|
||||
PluginInitializerContext,
|
||||
SavedObjectsClientContract,
|
||||
SavedObjectsServiceStart,
|
||||
UiSettingsServiceStart,
|
||||
IClusterClient,
|
||||
} from '@kbn/core/server';
|
||||
import { LicenseType } from '@kbn/licensing-plugin/common/types';
|
||||
import { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';
|
||||
import { DEFAULT_SPACE_ID } from '@kbn/spaces-plugin/common';
|
||||
import { SpacesPluginSetup } from '@kbn/spaces-plugin/server';
|
||||
import { ReportingConfigType } from '../../config';
|
||||
import { ReportingServerInfo } from '../../core';
|
||||
import { CreateJobFn, ReportingStart, RunTaskFn } from '../../types';
|
||||
|
||||
export interface BaseExportTypeSetupDeps {
|
||||
basePath: Pick<IBasePath, 'set'>;
|
||||
spaces?: SpacesPluginSetup;
|
||||
}
|
||||
|
||||
export interface BaseExportTypeStartDeps {
|
||||
savedObjects: SavedObjectsServiceStart;
|
||||
uiSettings: UiSettingsServiceStart;
|
||||
esClient: IClusterClient;
|
||||
screenshotting: ScreenshottingStart;
|
||||
reporting: ReportingStart;
|
||||
}
|
||||
|
||||
export abstract class ExportType<
|
||||
JobParamsType extends object = any,
|
||||
TaskPayloadType extends object = any,
|
||||
SetupDepsType extends BaseExportTypeSetupDeps = BaseExportTypeSetupDeps,
|
||||
StartDepsType extends BaseExportTypeStartDeps = BaseExportTypeStartDeps
|
||||
> {
|
||||
abstract id: string; // ID for exportTypesRegistry.getById()
|
||||
abstract name: string; // user-facing string
|
||||
abstract jobType: string; // for job params
|
||||
|
||||
abstract jobContentEncoding?: 'base64' | 'csv';
|
||||
abstract jobContentExtension: 'pdf' | 'png' | 'csv';
|
||||
|
||||
abstract createJob: CreateJobFn<JobParamsType>;
|
||||
abstract runTask: RunTaskFn<TaskPayloadType>;
|
||||
|
||||
abstract validLicenses: LicenseType[];
|
||||
|
||||
public setupDeps!: SetupDepsType;
|
||||
public startDeps!: StartDepsType;
|
||||
public http!: HttpServiceSetup;
|
||||
|
||||
constructor(
|
||||
core: CoreSetup,
|
||||
public config: ReportingConfigType,
|
||||
public logger: Logger,
|
||||
public context: PluginInitializerContext<ReportingConfigType>
|
||||
) {
|
||||
this.http = core.http;
|
||||
}
|
||||
|
||||
setup(setupDeps: SetupDepsType) {
|
||||
this.setupDeps = setupDeps;
|
||||
}
|
||||
start(startDeps: StartDepsType) {
|
||||
this.startDeps = startDeps;
|
||||
}
|
||||
|
||||
private async getSavedObjectsClient(request: KibanaRequest) {
|
||||
const { savedObjects } = this.startDeps;
|
||||
return savedObjects.getScopedClient(request) as SavedObjectsClientContract;
|
||||
}
|
||||
|
||||
// needed to be protected vs private for the csv search source immediate export type
|
||||
protected getUiSettingsServiceFactory(savedObjectsClient: SavedObjectsClientContract) {
|
||||
const { uiSettings: uiSettingsService } = this.startDeps;
|
||||
const scopedUiSettingsService = uiSettingsService.asScopedToClient(savedObjectsClient);
|
||||
return scopedUiSettingsService;
|
||||
}
|
||||
|
||||
protected async getUiSettingsClient(request: KibanaRequest, logger = this.logger) {
|
||||
const spacesService = this.setupDeps.spaces?.spacesService;
|
||||
const spaceId = this.startDeps.reporting.getSpaceId(request, logger);
|
||||
|
||||
if (spacesService && spaceId) {
|
||||
logger.info(`Creating UI Settings Client for space: ${spaceId}`);
|
||||
}
|
||||
const savedObjectsClient = await this.getSavedObjectsClient(request);
|
||||
return this.getUiSettingsServiceFactory(savedObjectsClient);
|
||||
}
|
||||
|
||||
protected getFakeRequest(
|
||||
headers: Headers,
|
||||
spaceId: string | undefined,
|
||||
logger = this.logger
|
||||
): KibanaRequest {
|
||||
const rawRequest: FakeRawRequest = {
|
||||
headers,
|
||||
path: '/',
|
||||
};
|
||||
const fakeRequest = CoreKibanaRequest.from(rawRequest);
|
||||
|
||||
const spacesService = this.setupDeps.spaces?.spacesService;
|
||||
if (spacesService) {
|
||||
if (spaceId && spaceId !== DEFAULT_SPACE_ID) {
|
||||
logger.info(`Generating request for space: ${spaceId}`);
|
||||
this.setupDeps.basePath.set(fakeRequest, `/s/${spaceId}`);
|
||||
}
|
||||
}
|
||||
return fakeRequest;
|
||||
}
|
||||
|
||||
/*
|
||||
* Returns configurable server info
|
||||
*/
|
||||
protected getServerInfo(): ReportingServerInfo {
|
||||
const serverInfo = this.http.getServerInfo();
|
||||
return {
|
||||
basePath: this.http.basePath.serverBasePath,
|
||||
hostname: serverInfo.hostname,
|
||||
name: serverInfo.name,
|
||||
port: serverInfo.port,
|
||||
uuid: this.context.env.instanceUuid,
|
||||
protocol: serverInfo.protocol,
|
||||
};
|
||||
}
|
||||
}
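
To make the contract above concrete, the following is a hedged, self-contained sketch of a minimal subclass and the setup → start → runTask lifecycle that `ReportingCore` drives. `MiniExportType` is a stripped-down stand-in rather than the real `ExportType`, and the dependency objects are placeholders only.

```ts
// Stand-in types — the real class uses Kibana's CreateJobFn/RunTaskFn and plugin dependency contracts.
type JobParams = { title: string };
type TaskPayload = { title: string; headers: string };

abstract class MiniExportType<P extends object, T extends object> {
  abstract id: string;
  abstract name: string;
  abstract jobType: string;
  abstract createJob: (params: P) => Promise<T>;
  abstract runTask: (jobId: string, payload: T) => Promise<{ content_type: string }>;

  setupDeps?: unknown;
  startDeps?: unknown;

  constructor(protected logger: { info: (msg: string) => void }) {}

  setup(deps: unknown) { this.setupDeps = deps; } // called from the plugin setup phase
  start(deps: unknown) { this.startDeps = deps; } // called from the plugin start phase
}

class HelloExportType extends MiniExportType<JobParams, TaskPayload> {
  id = 'hello';
  name = 'Hello report';
  jobType = 'hello';

  createJob = async (params: JobParams) => ({ ...params, headers: '<encrypted>' });

  runTask = async (jobId: string, payload: TaskPayload) => {
    this.logger.info(`running ${this.jobType} job ${jobId}: ${payload.title}`);
    return { content_type: 'text/plain' };
  };
}

// Lifecycle, in the order the core drives it: construct, setup, start, then run tasks.
const hello = new HelloExportType(console);
hello.setup({ basePath: {}, spaces: undefined });
hello.start({ savedObjects: {}, uiSettings: {}, esClient: {}, screenshotting: {}, reporting: {} });
```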
|
|
@ -10,6 +10,8 @@ export { getFullUrls } from './get_full_urls';
|
|||
export { validateUrls } from './validate_urls';
|
||||
export { generatePngObservable } from './generate_png';
|
||||
export { getCustomLogo } from './get_custom_logo';
|
||||
export { ExportType } from './export_type';
|
||||
export type { BaseExportTypeSetupDeps, BaseExportTypeStartDeps } from './export_type';
|
||||
|
||||
export interface TimeRangeParams {
|
||||
min?: Date | string | number | null;
|
||||
|
|
|
@ -1,16 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { CreateJobFn, CreateJobFnFactory } from '../../types';
|
||||
import { JobParamsCSV, TaskPayloadCSV } from './types';
|
||||
|
||||
export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<JobParamsCSV, TaskPayloadCSV>> =
|
||||
function createJobFactoryFn() {
|
||||
return async function createJob(jobParams) {
|
||||
return jobParams;
|
||||
};
|
||||
};
|
|
@ -0,0 +1,104 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
jest.mock('@kbn/generate-csv', () => ({
|
||||
CsvGenerator: class CsvGeneratorMock {
|
||||
generateData() {
|
||||
return {
|
||||
size: 123,
|
||||
content_type: 'text/csv',
|
||||
};
|
||||
}
|
||||
},
|
||||
}));
|
||||
|
||||
import nodeCrypto from '@elastic/node-crypto';
|
||||
import { coreMock, elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { Writable } from 'stream';
|
||||
import { ReportingCore } from '../..';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
import { CsvSearchSourceExportType } from './csv_searchsource';
|
||||
import { discoverPluginMock } from '@kbn/discover-plugin/server/mocks';
|
||||
import { dataPluginMock } from '@kbn/data-plugin/server/mocks';
|
||||
import { createMockScreenshottingStart } from '@kbn/screenshotting-plugin/server/mock';
|
||||
|
||||
const mockLogger = loggingSystemMock.createLogger();
|
||||
const encryptionKey = 'tetkey';
|
||||
const headers = { sid: 'cooltestheaders' };
|
||||
let encryptedHeaders: string;
|
||||
let mockReportingCore: ReportingCore;
|
||||
let stream: jest.Mocked<Writable>;
|
||||
let mockCsvSearchSourceExportType: CsvSearchSourceExportType;
|
||||
|
||||
beforeAll(async () => {
|
||||
const crypto = nodeCrypto({ encryptionKey });
|
||||
|
||||
encryptedHeaders = await crypto.encrypt(headers);
|
||||
const configType = createMockConfigSchema({
|
||||
encryptionKey,
|
||||
csv: {
|
||||
checkForFormulas: true,
|
||||
escapeFormulaValues: true,
|
||||
maxSizeBytes: 180000,
|
||||
scroll: { size: 500, duration: '30s' },
|
||||
},
|
||||
});
|
||||
const mockCoreSetup = coreMock.createSetup();
|
||||
const mockCoreStart = coreMock.createStart();
|
||||
const context = coreMock.createPluginInitializerContext(configType);
|
||||
|
||||
mockReportingCore = await createMockReportingCore(configType);
|
||||
|
||||
mockCsvSearchSourceExportType = new CsvSearchSourceExportType(
|
||||
mockCoreSetup,
|
||||
configType,
|
||||
mockLogger,
|
||||
context
|
||||
);
|
||||
|
||||
mockCsvSearchSourceExportType.setup({
|
||||
basePath: { set: jest.fn() },
|
||||
});
|
||||
|
||||
mockCsvSearchSourceExportType.start({
|
||||
esClient: elasticsearchServiceMock.createClusterClient(),
|
||||
savedObjects: mockCoreStart.savedObjects,
|
||||
uiSettings: mockCoreStart.uiSettings,
|
||||
discover: discoverPluginMock.createStartContract(),
|
||||
data: dataPluginMock.createStartContract(),
|
||||
screenshotting: createMockScreenshottingStart(),
|
||||
reporting: mockReportingCore.getContract(),
|
||||
});
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
stream = {} as typeof stream;
|
||||
});
|
||||
|
||||
test('gets the csv content from job parameters', async () => {
|
||||
const payload = await mockCsvSearchSourceExportType.runTask(
|
||||
'cool-job-id',
|
||||
{
|
||||
headers: encryptedHeaders,
|
||||
browserTimezone: 'US/Alaska',
|
||||
searchSource: {},
|
||||
objectType: 'search',
|
||||
title: 'Test Search',
|
||||
version: '7.13.0',
|
||||
},
|
||||
new CancellationToken(),
|
||||
stream
|
||||
);
|
||||
|
||||
expect(payload).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content_type": "text/csv",
|
||||
"size": 123,
|
||||
}
|
||||
`);
|
||||
});
|
|
@ -0,0 +1,100 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { DataPluginStart } from '@kbn/data-plugin/server/plugin';
|
||||
import { DiscoverServerPluginStart } from '@kbn/discover-plugin/server';
|
||||
import { CsvGenerator } from '@kbn/generate-csv';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { Writable } from 'stream';
|
||||
import {
|
||||
CSV_JOB_TYPE,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
} from '../../../common/constants';
|
||||
import { getFieldFormats } from '../../services';
|
||||
import { ExportType, BaseExportTypeSetupDeps, BaseExportTypeStartDeps } from '../common';
|
||||
import { decryptJobHeaders } from '../common/decrypt_job_headers';
|
||||
import { JobParamsCSV, TaskPayloadCSV } from './types';
|
||||
|
||||
type CsvSearchSourceExportTypeSetupDeps = BaseExportTypeSetupDeps;
|
||||
interface CsvSearchSourceExportTypeStartDeps extends BaseExportTypeStartDeps {
|
||||
discover: DiscoverServerPluginStart;
|
||||
data: DataPluginStart;
|
||||
}
|
||||
|
||||
export class CsvSearchSourceExportType extends ExportType<
|
||||
JobParamsCSV,
|
||||
TaskPayloadCSV,
|
||||
CsvSearchSourceExportTypeSetupDeps,
|
||||
CsvSearchSourceExportTypeStartDeps
|
||||
> {
|
||||
id = 'csv_searchsource';
|
||||
name = CSV_JOB_TYPE;
|
||||
jobType = CSV_JOB_TYPE;
|
||||
jobContentEncoding = 'base64' as const;
|
||||
jobContentExtension = 'csv' as const;
|
||||
validLicenses = [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
];
|
||||
|
||||
constructor(...args: ConstructorParameters<typeof ExportType>) {
|
||||
super(...args);
|
||||
this.logger = this.logger.get('csv-searchsource-export');
|
||||
}
|
||||
|
||||
public createJob = async (jobParams: JobParamsCSV) => {
|
||||
return { ...jobParams };
|
||||
};
|
||||
|
||||
public runTask = async (
|
||||
jobId: string,
|
||||
job: TaskPayloadCSV,
|
||||
cancellationToken: CancellationToken,
|
||||
stream: Writable
|
||||
) => {
|
||||
const { encryptionKey, csv: csvConfig } = this.config;
|
||||
const logger = this.logger.get(`execute-job:${jobId}`);
|
||||
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
|
||||
const fakeRequest = this.getFakeRequest(headers, job.spaceId, logger);
|
||||
const uiSettings = await this.getUiSettingsClient(fakeRequest, logger);
|
||||
const dataPluginStart = this.startDeps.data;
|
||||
const fieldFormatsRegistry = await getFieldFormats().fieldFormatServiceFactory(uiSettings);
|
||||
|
||||
const es = this.startDeps.esClient.asScoped(fakeRequest);
|
||||
const searchSourceStart = await dataPluginStart.search.searchSource.asScoped(fakeRequest);
|
||||
|
||||
const clients = {
|
||||
uiSettings,
|
||||
data: dataPluginStart.search.asScoped(fakeRequest),
|
||||
es,
|
||||
};
|
||||
const dependencies = {
|
||||
searchSourceStart,
|
||||
fieldFormatsRegistry,
|
||||
};
|
||||
|
||||
const csv = new CsvGenerator(
|
||||
job,
|
||||
csvConfig,
|
||||
clients,
|
||||
dependencies,
|
||||
cancellationToken,
|
||||
logger,
|
||||
stream
|
||||
);
|
||||
return await csv.generateData();
|
||||
};
|
||||
}
|
|
@ -1,78 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
jest.mock('@kbn/generate-csv', () => ({
|
||||
CsvGenerator: class CsvGeneratorMock {
|
||||
generateData() {
|
||||
return {
|
||||
size: 123,
|
||||
content_type: 'text/csv',
|
||||
};
|
||||
}
|
||||
},
|
||||
}));
|
||||
|
||||
import nodeCrypto from '@elastic/node-crypto';
|
||||
import { loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { Writable } from 'stream';
|
||||
import { ReportingCore } from '../..';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
import { runTaskFnFactory } from './execute_job';
|
||||
|
||||
const logger = loggingSystemMock.createLogger();
|
||||
const encryptionKey = 'tetkey';
|
||||
const headers = { sid: 'cooltestheaders' };
|
||||
let encryptedHeaders: string;
|
||||
let reportingCore: ReportingCore;
|
||||
let stream: jest.Mocked<Writable>;
|
||||
|
||||
beforeAll(async () => {
|
||||
const crypto = nodeCrypto({ encryptionKey });
|
||||
|
||||
encryptedHeaders = await crypto.encrypt(headers);
|
||||
reportingCore = await createMockReportingCore(
|
||||
createMockConfigSchema({
|
||||
encryptionKey,
|
||||
csv: {
|
||||
checkForFormulas: true,
|
||||
escapeFormulaValues: true,
|
||||
maxSizeBytes: 180000,
|
||||
scroll: { size: 500, duration: '30s' },
|
||||
},
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
stream = {} as typeof stream;
|
||||
});
|
||||
|
||||
test('gets the csv content from job parameters', async () => {
|
||||
const runTask = runTaskFnFactory(reportingCore, logger);
|
||||
|
||||
const payload = await runTask(
|
||||
'cool-job-id',
|
||||
{
|
||||
headers: encryptedHeaders,
|
||||
browserTimezone: 'US/Alaska',
|
||||
searchSource: {},
|
||||
objectType: 'search',
|
||||
title: 'Test Search',
|
||||
version: '7.13.0',
|
||||
},
|
||||
new CancellationToken(),
|
||||
stream
|
||||
);
|
||||
|
||||
expect(payload).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"content_type": "text/csv",
|
||||
"size": 123,
|
||||
}
|
||||
`);
|
||||
});
|
|
@ -1,54 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { CsvGenerator } from '@kbn/generate-csv';
|
||||
import { TaskPayloadCSV } from './types';
|
||||
import { getFieldFormats } from '../../services';
|
||||
import { RunTaskFn, RunTaskFnFactory } from '../../types';
|
||||
import { decryptJobHeaders } from '../common';
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<TaskPayloadCSV>> = (
|
||||
reporting,
|
||||
parentLogger
|
||||
) => {
|
||||
const { encryptionKey, csv: csvConfig } = reporting.getConfig();
|
||||
|
||||
return async function runTask(jobId, job, cancellationToken, stream) {
|
||||
const logger = parentLogger.get(`execute-job:${jobId}`);
|
||||
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
|
||||
const fakeRequest = reporting.getFakeRequest(headers, job.spaceId, logger);
|
||||
const uiSettings = await reporting.getUiSettingsClient(fakeRequest, logger);
|
||||
const dataPluginStart = await reporting.getDataService();
|
||||
const fieldFormatsRegistry = await getFieldFormats().fieldFormatServiceFactory(uiSettings);
|
||||
|
||||
const [es, searchSourceStart] = await Promise.all([
|
||||
(await reporting.getEsClient()).asScoped(fakeRequest),
|
||||
await dataPluginStart.search.searchSource.asScoped(fakeRequest),
|
||||
]);
|
||||
|
||||
const clients = {
|
||||
uiSettings,
|
||||
data: dataPluginStart.search.asScoped(fakeRequest),
|
||||
es,
|
||||
};
|
||||
const dependencies = {
|
||||
searchSourceStart,
|
||||
fieldFormatsRegistry,
|
||||
};
|
||||
|
||||
const csv = new CsvGenerator(
|
||||
job,
|
||||
csvConfig,
|
||||
clients,
|
||||
dependencies,
|
||||
cancellationToken,
|
||||
logger,
|
||||
stream
|
||||
);
|
||||
return await csv.generateData();
|
||||
};
|
||||
};
|
|
@ -5,36 +5,4 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CSV_JOB_TYPE as jobType,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
} from '../../../common/constants';
|
||||
import { CreateJobFn, ExportTypeDefinition, RunTaskFn } from '../../types';
|
||||
import { createJobFnFactory } from './create_job';
|
||||
import { runTaskFnFactory } from './execute_job';
|
||||
import { metadata } from './metadata';
|
||||
import { JobParamsCSV, TaskPayloadCSV } from './types';
|
||||
|
||||
export const getExportType = (): ExportTypeDefinition<
|
||||
CreateJobFn<JobParamsCSV>,
|
||||
RunTaskFn<TaskPayloadCSV>
|
||||
> => ({
|
||||
...metadata,
|
||||
jobType,
|
||||
jobContentExtension: 'csv',
|
||||
createJobFnFactory,
|
||||
runTaskFnFactory,
|
||||
validLicenses: [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
],
|
||||
});
|
||||
export { CsvSearchSourceExportType } from './csv_searchsource';
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { CSV_JOB_TYPE } from '../../../common/constants';
|
||||
|
||||
export const metadata = {
|
||||
id: 'csv_searchsource',
|
||||
name: CSV_JOB_TYPE,
|
||||
};
|
|
@ -0,0 +1,137 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { KibanaRequest } from '@kbn/core-http-server';
|
||||
import { DataPluginStart } from '@kbn/data-plugin/server/plugin';
|
||||
import { DiscoverServerPluginStart } from '@kbn/discover-plugin/server';
|
||||
import { CsvGenerator } from '@kbn/generate-csv';
|
||||
import { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
|
||||
import { Writable } from 'stream';
|
||||
import {
|
||||
ExportType,
|
||||
BaseExportTypeSetupDeps,
|
||||
BaseExportTypeStartDeps,
|
||||
} from '../common/export_type';
|
||||
import {
|
||||
CSV_SEARCHSOURCE_IMMEDIATE_TYPE,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
} from '../../../common/constants';
|
||||
import { getFieldFormats } from '../../services';
|
||||
import { ReportingRequestHandlerContext } from '../../types';
|
||||
import { JobParamsDownloadCSV } from './types';
|
||||
|
||||
type CsvSearchSourceImmediateExportTypeSetupDeps = BaseExportTypeSetupDeps;
|
||||
interface CsvSearchSourceImmediateExportTypeStartDeps extends BaseExportTypeStartDeps {
|
||||
discover: DiscoverServerPluginStart;
|
||||
data: DataPluginStart;
|
||||
}
|
||||
|
||||
/*
|
||||
* ImmediateExecuteFn receives the job doc payload because the payload was
|
||||
* generated in the ScheduleFn
|
||||
*/
|
||||
export type ImmediateExecuteFn = (
|
||||
jobId: null,
|
||||
job: JobParamsDownloadCSV,
|
||||
context: ReportingRequestHandlerContext,
|
||||
stream: Writable,
|
||||
req: KibanaRequest
|
||||
) => Promise<TaskRunResult>;
|
||||
|
||||
export class CsvSearchSourceImmediateExportType extends ExportType<
|
||||
JobParamsDownloadCSV,
|
||||
ImmediateExecuteFn,
|
||||
CsvSearchSourceImmediateExportTypeSetupDeps,
|
||||
CsvSearchSourceImmediateExportTypeStartDeps
|
||||
> {
|
||||
id = CSV_SEARCHSOURCE_IMMEDIATE_TYPE;
|
||||
name = CSV_SEARCHSOURCE_IMMEDIATE_TYPE;
|
||||
jobType = CSV_SEARCHSOURCE_IMMEDIATE_TYPE;
|
||||
jobContentEncoding = 'base64' as const;
|
||||
jobContentExtension = 'csv' as const;
|
||||
validLicenses = [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
];
|
||||
|
||||
constructor(...args: ConstructorParameters<typeof ExportType>) {
|
||||
super(...args);
|
||||
this.logger = this.logger.get('csv-searchsource-export');
|
||||
}
|
||||
|
||||
public createJob = async () => {
|
||||
throw new Error(`immediate download has no create job handler!`);
|
||||
};
|
||||
// @ts-ignore expected type failure from deprecated export type
|
||||
public runTask = async (
|
||||
_jobId: string | null,
|
||||
immediateJobParams: JobParamsDownloadCSV,
|
||||
context: ReportingRequestHandlerContext,
|
||||
stream: Writable,
|
||||
req: KibanaRequest
|
||||
) => {
|
||||
const job = {
|
||||
objectType: 'immediate-search',
|
||||
...immediateJobParams,
|
||||
};
|
||||
|
||||
const dataPluginStart = this.startDeps.data;
|
||||
const savedObjectsClient = (await context.core).savedObjects.client;
|
||||
const uiSettings = this.getUiSettingsServiceFactory(savedObjectsClient);
|
||||
const fieldFormatsRegistry = await getFieldFormats().fieldFormatServiceFactory(uiSettings);
|
||||
|
||||
const es = this.startDeps.esClient.asScoped(req);
|
||||
const searchSourceStart = await dataPluginStart.search.searchSource.asScoped(req);
|
||||
const clients = {
|
||||
uiSettings,
|
||||
data: dataPluginStart.search.asScoped(req),
|
||||
es,
|
||||
};
|
||||
const dependencies = {
|
||||
fieldFormatsRegistry,
|
||||
searchSourceStart,
|
||||
};
|
||||
const cancellationToken = new CancellationToken();
|
||||
const csvConfig = this.config.csv;
|
||||
const csv = new CsvGenerator(
|
||||
job,
|
||||
csvConfig,
|
||||
clients,
|
||||
dependencies,
|
||||
cancellationToken,
|
||||
this.logger,
|
||||
stream
|
||||
);
|
||||
const result = await csv.generateData();
|
||||
|
||||
if (result.csv_contains_formulas) {
|
||||
this.logger.warn(`CSV may contain formulas whose values have been escaped`);
|
||||
}
|
||||
|
||||
if (result.max_size_reached) {
|
||||
this.logger.warn(`Max size reached: CSV output truncated`);
|
||||
}
|
||||
|
||||
const { warnings } = result;
|
||||
if (warnings) {
|
||||
warnings.forEach((warning) => {
|
||||
this.logger.warn(warning);
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
}
|
|
@ -1,89 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { KibanaRequest } from '@kbn/core/server';
|
||||
import { Writable } from 'stream';
|
||||
import { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
|
||||
import { CsvGenerator } from '@kbn/generate-csv';
|
||||
import { getFieldFormats } from '../../services';
|
||||
import { ReportingRequestHandlerContext, RunTaskFnFactory } from '../../types';
|
||||
import { JobParamsDownloadCSV } from './types';
|
||||
|
||||
/*
|
||||
* ImmediateExecuteFn receives the job doc payload because the payload was
|
||||
* generated in the ScheduleFn
|
||||
*/
|
||||
export type ImmediateExecuteFn = (
|
||||
jobId: null,
|
||||
job: JobParamsDownloadCSV,
|
||||
context: ReportingRequestHandlerContext,
|
||||
stream: Writable,
|
||||
req: KibanaRequest
|
||||
) => Promise<TaskRunResult>;
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<ImmediateExecuteFn> = function executeJobFactoryFn(
|
||||
reporting,
|
||||
parentLogger
|
||||
) {
|
||||
const { csv: csvConfig } = reporting.getConfig();
|
||||
const logger = parentLogger.get('execute-job');
|
||||
|
||||
return async function runTask(_jobId, immediateJobParams, context, stream, req) {
|
||||
const job = {
|
||||
objectType: 'immediate-search',
|
||||
...immediateJobParams,
|
||||
};
|
||||
|
||||
const dataPluginStart = await reporting.getDataService();
|
||||
const savedObjectsClient = (await context.core).savedObjects.client;
|
||||
const uiSettings = await reporting.getUiSettingsServiceFactory(savedObjectsClient);
|
||||
const fieldFormatsRegistry = await getFieldFormats().fieldFormatServiceFactory(uiSettings);
|
||||
|
||||
const [es, searchSourceStart] = await Promise.all([
|
||||
(await reporting.getEsClient()).asScoped(req),
|
||||
await dataPluginStart.search.searchSource.asScoped(req),
|
||||
]);
|
||||
const clients = {
|
||||
uiSettings,
|
||||
data: dataPluginStart.search.asScoped(req),
|
||||
es,
|
||||
};
|
||||
const dependencies = {
|
||||
fieldFormatsRegistry,
|
||||
searchSourceStart,
|
||||
};
|
||||
const cancellationToken = new CancellationToken();
|
||||
|
||||
const csv = new CsvGenerator(
|
||||
job,
|
||||
csvConfig,
|
||||
clients,
|
||||
dependencies,
|
||||
cancellationToken,
|
||||
logger,
|
||||
stream
|
||||
);
|
||||
const result = await csv.generateData();
|
||||
|
||||
if (result.csv_contains_formulas) {
|
||||
logger.warn(`CSV may contain formulas whose values have been escaped`);
|
||||
}
|
||||
|
||||
if (result.max_size_reached) {
|
||||
logger.warn(`Max size reached: CSV output truncated`);
|
||||
}
|
||||
|
||||
const { warnings } = result;
|
||||
if (warnings) {
|
||||
warnings.forEach((warning) => {
|
||||
logger.warn(warning);
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
};
|
||||
};
|
|
@ -5,37 +5,4 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CSV_SEARCHSOURCE_IMMEDIATE_TYPE,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
} from '../../../common/constants';
|
||||
import { ExportTypeDefinition } from '../../types';
|
||||
import { ImmediateExecuteFn, runTaskFnFactory } from './execute_job';
|
||||
import { metadata } from './metadata';
|
||||
|
||||
/*
|
||||
* These functions are exported to share with the API route handler that
|
||||
* generates csv from saved object immediately on request.
|
||||
*/
|
||||
export { runTaskFnFactory } from './execute_job';
|
||||
|
||||
export const getExportType = (): ExportTypeDefinition<null, ImmediateExecuteFn> => ({
|
||||
...metadata,
|
||||
jobType: CSV_SEARCHSOURCE_IMMEDIATE_TYPE,
|
||||
jobContentExtension: 'csv',
|
||||
createJobFnFactory: null,
|
||||
runTaskFnFactory,
|
||||
validLicenses: [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
],
|
||||
});
|
||||
export { CsvSearchSourceImmediateExportType } from './csv_searchsource_immediate';
|
||||
|
|
|
@ -1,13 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { CSV_SEARCHSOURCE_IMMEDIATE_TYPE } from '../../../common/constants';
|
||||
|
||||
export const metadata = {
|
||||
id: CSV_SEARCHSOURCE_IMMEDIATE_TYPE,
|
||||
name: CSV_SEARCHSOURCE_IMMEDIATE_TYPE,
|
||||
};
|
|
@ -1,42 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import Boom from '@hapi/boom';
|
||||
import { JobParamsCsvFromSavedObject, TaskPayloadCsvFromSavedObject } from '../../../common/types';
|
||||
import { CreateJobFn, CreateJobFnFactory } from '../../types';
|
||||
|
||||
type CreateJobFnType = CreateJobFn<JobParamsCsvFromSavedObject, TaskPayloadCsvFromSavedObject>;
|
||||
|
||||
export const createJobFnFactory: CreateJobFnFactory<CreateJobFnType> = function createJobFactoryFn(
|
||||
reporting
|
||||
) {
|
||||
return async function createJob(jobParams, _context, req) {
|
||||
// 1. Validation of locatorParams
|
||||
const { locatorParams } = jobParams;
|
||||
const { id, params } = locatorParams[0];
|
||||
if (
|
||||
!locatorParams ||
|
||||
!Array.isArray(locatorParams) ||
|
||||
locatorParams.length !== 1 ||
|
||||
id !== 'DISCOVER_APP_LOCATOR' ||
|
||||
!params
|
||||
) {
|
||||
throw Boom.badRequest('Invalid Job params: must contain a single Discover App locator');
|
||||
}
|
||||
|
||||
if (!params || !params.savedSearchId || typeof params.savedSearchId !== 'string') {
|
||||
throw Boom.badRequest('Invalid Discover App locator: must contain a savedSearchId');
|
||||
}
|
||||
|
||||
// use Discover contract to get the title of the report from job params
|
||||
const { discover: discoverPluginStart } = await reporting.getPluginStartDeps();
|
||||
const locatorClient = await discoverPluginStart.locator.asScopedClient(req);
|
||||
const title = await locatorClient.titleFromLocator(params);
|
||||
|
||||
return { ...jobParams, title };
|
||||
};
|
||||
};
|
x-pack/plugins/reporting/server/export_types/csv_v2/csv_v2.ts (new file, 138 lines)
|
@ -0,0 +1,138 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import Boom from '@hapi/boom';
|
||||
import { KibanaRequest } from '@kbn/core/server';
|
||||
import { DiscoverServerPluginStart } from '@kbn/discover-plugin/server';
|
||||
import { DataPluginStart } from '@kbn/data-plugin/server/plugin';
|
||||
import { CsvGenerator } from '@kbn/generate-csv';
|
||||
import { Writable } from 'stream';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { JobParamsCsvFromSavedObject, TaskPayloadCsvFromSavedObject } from '../../../common/types';
|
||||
import {
|
||||
CSV_REPORT_TYPE_V2,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
} from '../../../common/constants';
|
||||
import { ExportType, BaseExportTypeSetupDeps, BaseExportTypeStartDeps } from '../common';
|
||||
import { ReportingRequestHandlerContext } from '../../types';
|
||||
import { getFieldFormats } from '../../services';
|
||||
import { decryptJobHeaders } from '../common/decrypt_job_headers';
|
||||
|
||||
type CsvV2ExportTypeSetupDeps = BaseExportTypeSetupDeps;
|
||||
export interface CsvV2ExportTypeStartDeps extends BaseExportTypeStartDeps {
|
||||
discover: DiscoverServerPluginStart;
|
||||
data: DataPluginStart;
|
||||
}
|
||||
|
||||
export class CsvV2ExportType extends ExportType<
|
||||
JobParamsCsvFromSavedObject,
|
||||
TaskPayloadCsvFromSavedObject,
|
||||
CsvV2ExportTypeSetupDeps,
|
||||
CsvV2ExportTypeStartDeps
|
||||
> {
|
||||
id = CSV_REPORT_TYPE_V2;
|
||||
name = CSV_REPORT_TYPE_V2;
|
||||
jobType = CSV_REPORT_TYPE_V2;
|
||||
jobContentEncoding = 'base64' as const;
|
||||
jobContentExtension = 'csv' as const;
|
||||
validLicenses = [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
];
|
||||
|
||||
constructor(...args: ConstructorParameters<typeof ExportType>) {
|
||||
super(...args);
|
||||
const logger = args[2];
|
||||
this.logger = logger.get('csv-export-v2');
|
||||
}
|
||||
|
||||
public createJob = async (
|
||||
jobParams: JobParamsCsvFromSavedObject,
|
||||
_context: ReportingRequestHandlerContext,
|
||||
req: KibanaRequest
|
||||
) => {
|
||||
// 1. Validation of locatorParams
|
||||
const { locatorParams } = jobParams;
|
||||
const { id, params } = locatorParams[0];
|
||||
if (
|
||||
!locatorParams ||
|
||||
!Array.isArray(locatorParams) ||
|
||||
locatorParams.length !== 1 ||
|
||||
id !== 'DISCOVER_APP_LOCATOR' ||
|
||||
!params
|
||||
) {
|
||||
throw Boom.badRequest('Invalid Job params: must contain a single Discover App locator');
|
||||
}
|
||||
|
||||
if (!params || !params.savedSearchId || typeof params.savedSearchId !== 'string') {
|
||||
throw Boom.badRequest('Invalid Discover App locator: must contain a savedSearchId');
|
||||
}
|
||||
|
||||
// use Discover contract to get the title of the report from job params
|
||||
const { discover: discoverPluginStart } = this.startDeps;
|
||||
const locatorClient = await discoverPluginStart.locator.asScopedClient(req);
|
||||
const title = await locatorClient.titleFromLocator(params);
|
||||
|
||||
return { ...jobParams, title, objectType: 'search', isDeprecated: false };
|
||||
};
|
||||
|
||||
public runTask = async (
|
||||
jobId: string,
|
||||
job: TaskPayloadCsvFromSavedObject,
|
||||
cancellationToken: CancellationToken,
|
||||
stream: Writable
|
||||
) => {
|
||||
const config = this.config;
|
||||
const { encryptionKey, csv: csvConfig } = config;
|
||||
const logger = this.logger.get(`execute:${jobId}`);
|
||||
|
||||
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
|
||||
const fakeRequest = this.getFakeRequest(headers, job.spaceId, logger);
|
||||
const uiSettings = await this.getUiSettingsClient(fakeRequest, logger);
|
||||
const fieldFormatsRegistry = await getFieldFormats().fieldFormatServiceFactory(uiSettings);
|
||||
const { data: dataPluginStart, discover: discoverPluginStart } = this.startDeps;
|
||||
const data = dataPluginStart.search.asScoped(fakeRequest);
|
||||
|
||||
const { locatorParams } = job;
|
||||
const { params } = locatorParams[0];
|
||||
|
||||
// use Discover contract to convert the job params into inputs for CsvGenerator
|
||||
const locatorClient = await discoverPluginStart.locator.asScopedClient(fakeRequest);
|
||||
const columns = await locatorClient.columnsFromLocator(params);
|
||||
const searchSource = await locatorClient.searchSourceFromLocator(params);
|
||||
|
||||
const es = this.startDeps.esClient.asScoped(fakeRequest);
|
||||
const searchSourceStart = await dataPluginStart.search.searchSource.asScoped(fakeRequest);
|
||||
|
||||
const clients = { uiSettings, data, es };
|
||||
const dependencies = { searchSourceStart, fieldFormatsRegistry };
|
||||
|
||||
const csv = new CsvGenerator(
|
||||
{
|
||||
columns,
|
||||
searchSource: searchSource.getSerializedFields(true),
|
||||
...job,
|
||||
},
|
||||
csvConfig,
|
||||
clients,
|
||||
dependencies,
|
||||
cancellationToken,
|
||||
logger,
|
||||
stream
|
||||
);
|
||||
return await csv.generateData();
|
||||
};
|
||||
}
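
For reference, a hedged example of job params that would satisfy the locator validation in `createJob()` above. The values are illustrative; only the `locatorParams` shape (exactly one Discover App locator with a string `savedSearchId`) is asserted by the code.

```ts
// Illustrative only — shaped after the JobParamsCsvFromSavedObject usage above.
const exampleJobParams = {
  browserTimezone: 'UTC', // assumed common base job param; not checked by the validation above
  locatorParams: [
    {
      id: 'DISCOVER_APP_LOCATOR',
      params: { savedSearchId: 'c0ffee00-0000-0000-0000-000000000000' },
    },
  ],
};
```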
|
|
@ -1,62 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { CsvGenerator } from '@kbn/generate-csv';
|
||||
import type { TaskPayloadCsvFromSavedObject } from '../../../common/types';
|
||||
import { getFieldFormats } from '../../services';
|
||||
import type { RunTaskFn, RunTaskFnFactory } from '../../types';
|
||||
import { decryptJobHeaders } from '../common';
|
||||
|
||||
type RunTaskFnType = RunTaskFn<TaskPayloadCsvFromSavedObject>;
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<RunTaskFnType> = (reporting, _logger) => {
|
||||
const config = reporting.getConfig();
|
||||
const { encryptionKey, csv: csvConfig } = config;
|
||||
|
||||
return async function runTask(jobId, job, cancellationToken, stream) {
|
||||
const logger = _logger.get(`execute:${jobId}`);
|
||||
|
||||
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
|
||||
const fakeRequest = reporting.getFakeRequest(headers, job.spaceId, logger);
|
||||
const uiSettings = await reporting.getUiSettingsClient(fakeRequest, logger);
|
||||
const fieldFormatsRegistry = await getFieldFormats().fieldFormatServiceFactory(uiSettings);
|
||||
const { data: dataPluginStart, discover: discoverPluginStart } =
|
||||
await reporting.getPluginStartDeps();
|
||||
const data = dataPluginStart.search.asScoped(fakeRequest);
|
||||
|
||||
const { locatorParams } = job;
|
||||
const { params } = locatorParams[0];
|
||||
|
||||
// use Discover contract to convert the job params into inputs for CsvGenerator
|
||||
const locatorClient = await discoverPluginStart.locator.asScopedClient(fakeRequest);
|
||||
const columns = await locatorClient.columnsFromLocator(params);
|
||||
const searchSource = await locatorClient.searchSourceFromLocator(params);
|
||||
|
||||
const [es, searchSourceStart] = await Promise.all([
|
||||
(await reporting.getEsClient()).asScoped(fakeRequest),
|
||||
await dataPluginStart.search.searchSource.asScoped(fakeRequest),
|
||||
]);
|
||||
|
||||
const clients = { uiSettings, data, es };
|
||||
const dependencies = { searchSourceStart, fieldFormatsRegistry };
|
||||
|
||||
const csv = new CsvGenerator(
|
||||
{
|
||||
columns,
|
||||
searchSource: searchSource.getSerializedFields(true),
|
||||
...job,
|
||||
},
|
||||
csvConfig,
|
||||
clients,
|
||||
dependencies,
|
||||
cancellationToken,
|
||||
logger,
|
||||
stream
|
||||
);
|
||||
return await csv.generateData();
|
||||
};
|
||||
};
|
|
@ -5,36 +5,4 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import {
|
||||
CSV_REPORT_TYPE_V2 as CSV_JOB_TYPE,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
} from '../../../common/constants';
|
||||
import { JobParamsCsvFromSavedObject, TaskPayloadCsvFromSavedObject } from '../../../common/types';
|
||||
import { CreateJobFn, ExportTypeDefinition, RunTaskFn } from '../../types';
|
||||
import { createJobFnFactory } from './create_job';
|
||||
import { runTaskFnFactory } from './execute_job';
|
||||
|
||||
export const getExportType = (): ExportTypeDefinition<
|
||||
CreateJobFn<JobParamsCsvFromSavedObject>,
|
||||
RunTaskFn<TaskPayloadCsvFromSavedObject>
|
||||
> => ({
|
||||
id: CSV_JOB_TYPE,
|
||||
name: CSV_JOB_TYPE,
|
||||
jobType: CSV_JOB_TYPE,
|
||||
jobContentExtension: 'csv',
|
||||
createJobFnFactory,
|
||||
runTaskFnFactory,
|
||||
validLicenses: [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_BASIC,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
],
|
||||
});
|
||||
export { CsvV2ExportType } from './csv_v2';
|
||||
|
|
|
@ -1,24 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { CreateJobFn, CreateJobFnFactory } from '../../../types';
|
||||
import { validateUrls } from '../../common';
|
||||
import { JobParamsPNGDeprecated, TaskPayloadPNG } from '../types';
|
||||
|
||||
export const createJobFnFactory: CreateJobFnFactory<
|
||||
CreateJobFn<JobParamsPNGDeprecated, TaskPayloadPNG>
|
||||
> = function createJobFactoryFn() {
|
||||
return async function createJob(jobParams) {
|
||||
validateUrls([jobParams.relativeUrl]);
|
||||
|
||||
return {
|
||||
...jobParams,
|
||||
isDeprecated: true,
|
||||
forceNow: new Date().toISOString(),
|
||||
};
|
||||
};
|
||||
};
|
|
@ -1,67 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { TaskRunResult } from '@kbn/reporting-common';
|
||||
import apm from 'elastic-apm-node';
|
||||
import * as Rx from 'rxjs';
|
||||
import { finalize, map, mergeMap, takeUntil, tap } from 'rxjs/operators';
|
||||
import { REPORTING_TRANSACTION_TYPE } from '../../../../common/constants';
|
||||
import { RunTaskFn, RunTaskFnFactory } from '../../../types';
|
||||
import { decryptJobHeaders, generatePngObservable, getFullUrls } from '../../common';
|
||||
import { TaskPayloadPNG } from '../types';
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<TaskPayloadPNG>> =
|
||||
function executeJobFactoryFn(reporting, parentLogger) {
|
||||
const { encryptionKey } = reporting.getConfig();
|
||||
|
||||
return function runTask(jobId, job, cancellationToken, stream) {
|
||||
const apmTrans = apm.startTransaction('execute-job-png', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePng: { end: () => void } | null | undefined;
|
||||
|
||||
const jobLogger = parentLogger.get(`execute:${jobId}`);
|
||||
const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(encryptionKey, job.headers, jobLogger)),
|
||||
mergeMap((headers) => {
|
||||
const [url] = getFullUrls(reporting.getServerInfo(), reporting.getConfig(), job);
|
||||
|
||||
apmGetAssets?.end();
|
||||
apmGeneratePng = apmTrans?.startSpan('generate-png-pipeline', 'execute');
|
||||
const screenshotFn = () =>
|
||||
reporting.getScreenshots({
|
||||
headers,
|
||||
urls: [url],
|
||||
browserTimezone: job.browserTimezone,
|
||||
layout: {
|
||||
...job.layout,
|
||||
id: 'preserve_layout',
|
||||
},
|
||||
});
|
||||
return generatePngObservable(screenshotFn, jobLogger, {
|
||||
headers,
|
||||
urls: [url],
|
||||
browserTimezone: job.browserTimezone,
|
||||
layout: {
|
||||
...job.layout,
|
||||
id: 'preserve_layout',
|
||||
},
|
||||
});
|
||||
}),
|
||||
tap(({ buffer }) => stream.write(buffer)),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'image/png',
|
||||
metrics: { png: metrics },
|
||||
warnings,
|
||||
})),
|
||||
tap({ error: (error) => jobLogger.error(error) }),
|
||||
finalize(() => apmGeneratePng?.end())
|
||||
);
|
||||
|
||||
const stop$ = Rx.fromEventPattern(cancellationToken.on);
|
||||
return Rx.lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
};
|
|
@@ -5,35 +5,4 @@
 * 2.0.
 */

import {
  LICENSE_TYPE_ENTERPRISE,
  LICENSE_TYPE_GOLD,
  LICENSE_TYPE_PLATINUM,
  LICENSE_TYPE_CLOUD_STANDARD,
  LICENSE_TYPE_TRIAL,
  PNG_JOB_TYPE as jobType,
} from '../../../common/constants';
import { CreateJobFn, ExportTypeDefinition, RunTaskFn } from '../../types';
import { createJobFnFactory } from './create_job';
import { runTaskFnFactory } from './execute_job';
import { metadata } from './metadata';
import { JobParamsPNGDeprecated, TaskPayloadPNG } from './types';

export const getExportType = (): ExportTypeDefinition<
  CreateJobFn<JobParamsPNGDeprecated>,
  RunTaskFn<TaskPayloadPNG>
> => ({
  ...metadata,
  jobType,
  jobContentEncoding: 'base64',
  jobContentExtension: 'PNG',
  createJobFnFactory,
  runTaskFnFactory,
  validLicenses: [
    LICENSE_TYPE_TRIAL,
    LICENSE_TYPE_CLOUD_STANDARD,
    LICENSE_TYPE_GOLD,
    LICENSE_TYPE_PLATINUM,
    LICENSE_TYPE_ENTERPRISE,
  ],
});
export { PngV1ExportType } from './png';
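Per the PR summary, these classes are registered into `export_types_registry` in reporting core rather than being built through per-directory `getExportType()` factories. A self-contained sketch of what such a registry can look like; the method names here are assumptions, not necessarily the real `ExportTypesRegistry` API:

```ts
// Assumed registry shape, for illustration: export type instances are collected once,
// keyed by id, so reporting core can look them up by report type or job type.
interface RegistrableExportType {
  id: string;
  jobType: string;
  validLicenses: string[];
}

class ExportTypesRegistrySketch {
  private readonly byId = new Map<string, RegistrableExportType>();

  register(exportType: RegistrableExportType) {
    if (this.byId.has(exportType.id)) {
      throw new Error(`Export type "${exportType.id}" is already registered`);
    }
    this.byId.set(exportType.id, exportType);
  }

  getById(id: string): RegistrableExportType {
    const found = this.byId.get(id);
    if (!found) throw new Error(`Unknown export type "${id}"`);
    return found;
  }

  getAll(): RegistrableExportType[] {
    return [...this.byId.values()];
  }
}
```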
@@ -1,11 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const metadata = {
  id: 'png',
  name: 'PNG',
};
@ -6,27 +6,27 @@
|
|||
*/
|
||||
|
||||
import * as Rx from 'rxjs';
|
||||
import { loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { coreMock, elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { Writable } from 'stream';
|
||||
import { ReportingCore } from '../../..';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { cryptoFactory } from '../../../lib';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../../test_helpers';
|
||||
import { generatePngObservable } from '../../common';
|
||||
import { TaskPayloadPNG } from '../types';
|
||||
import { runTaskFnFactory } from '.';
|
||||
import { cryptoFactory } from '../../lib';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
import { generatePngObservable } from '../common';
|
||||
import { TaskPayloadPNG } from './types';
|
||||
import { PngV1ExportType } from './png';
|
||||
import { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';
|
||||
|
||||
jest.mock('../../common/generate_png');
|
||||
jest.mock('../common/generate_png');
|
||||
|
||||
let content: string;
|
||||
let mockReporting: ReportingCore;
|
||||
let mockPngExportType: PngV1ExportType;
|
||||
let stream: jest.Mocked<Writable>;
|
||||
|
||||
const cancellationToken = {
|
||||
on: jest.fn(),
|
||||
} as unknown as CancellationToken;
|
||||
|
||||
const getMockLogger = () => loggingSystemMock.createLogger();
|
||||
const mockLogger = loggingSystemMock.createLogger();
|
||||
|
||||
const mockEncryptionKey = 'abcabcsecuresecret';
|
||||
const encryptHeaders = async (headers: Record<string, string>) => {
|
||||
|
@ -40,15 +40,25 @@ beforeEach(async () => {
|
|||
content = '';
|
||||
stream = { write: jest.fn((chunk) => (content += chunk)) } as unknown as typeof stream;
|
||||
|
||||
const mockReportingConfig = createMockConfigSchema({
|
||||
encryptionKey: mockEncryptionKey,
|
||||
queue: {
|
||||
indexInterval: 'daily',
|
||||
timeout: Infinity,
|
||||
},
|
||||
});
|
||||
const configType = createMockConfigSchema({ encryptionKey: mockEncryptionKey });
|
||||
const context = coreMock.createPluginInitializerContext(configType);
|
||||
|
||||
mockReporting = await createMockReportingCore(mockReportingConfig);
|
||||
const mockCoreSetup = coreMock.createSetup();
|
||||
const mockCoreStart = coreMock.createStart();
|
||||
const mockReportingCore = await createMockReportingCore(createMockConfigSchema());
|
||||
|
||||
mockPngExportType = new PngV1ExportType(mockCoreSetup, configType, mockLogger, context);
|
||||
|
||||
mockPngExportType.setup({
|
||||
basePath: { set: jest.fn() },
|
||||
});
|
||||
mockPngExportType.start({
|
||||
esClient: elasticsearchServiceMock.createClusterClient(),
|
||||
savedObjects: mockCoreStart.savedObjects,
|
||||
uiSettings: mockCoreStart.uiSettings,
|
||||
screenshotting: {} as unknown as ScreenshottingStart,
|
||||
reporting: mockReportingCore.getContract(),
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => (generatePngObservable as jest.Mock).mockReset());
|
||||
|
@ -57,9 +67,8 @@ test(`passes browserTimezone to generatePng`, async () => {
|
|||
const encryptedHeaders = await encryptHeaders({});
|
||||
(generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('') }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const browserTimezone = 'UTC';
|
||||
await runTask(
|
||||
await mockPngExportType.runTask(
|
||||
'pngJobId',
|
||||
getBasePayload({
|
||||
relativeUrl: '/app/kibana#/something',
|
||||
|
@ -82,12 +91,11 @@ test(`passes browserTimezone to generatePng`, async () => {
|
|||
});
|
||||
|
||||
test(`returns content_type of image/png`, async () => {
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
|
||||
(generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('foo') }));
|
||||
|
||||
const { content_type: contentType } = await runTask(
|
||||
const { content_type: contentType } = await mockPngExportType.runTask(
|
||||
'pngJobId',
|
||||
getBasePayload({ relativeUrl: '/app/kibana#/something', headers: encryptedHeaders }),
|
||||
cancellationToken,
|
||||
|
@ -100,9 +108,8 @@ test(`returns content of generatePng`, async () => {
|
|||
const testContent = 'raw string from get_screenshots';
|
||||
(generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
await runTask(
|
||||
await mockPngExportType.runTask(
|
||||
'pngJobId',
|
||||
getBasePayload({ relativeUrl: '/app/kibana#/something', headers: encryptedHeaders }),
|
||||
cancellationToken,
|
x-pack/plugins/reporting/server/export_types/png/png.ts (new file, 122 lines)
@ -0,0 +1,122 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import { LicenseType } from '@kbn/licensing-plugin/server';
|
||||
import { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
|
||||
import apm from 'elastic-apm-node';
|
||||
import { Writable } from 'stream';
|
||||
import {
|
||||
fromEventPattern,
|
||||
mergeMap,
|
||||
finalize,
|
||||
takeUntil,
|
||||
tap,
|
||||
map,
|
||||
Observable,
|
||||
of,
|
||||
lastValueFrom,
|
||||
} from 'rxjs';
|
||||
import { JobParamsPNGDeprecated, TaskPayloadPNG } from './types';
|
||||
import { decryptJobHeaders, ExportType, generatePngObservable, getFullUrls } from '../common';
|
||||
import { validateUrls } from '../common/validate_urls';
|
||||
import {
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
PNG_JOB_TYPE,
|
||||
REPORTING_TRANSACTION_TYPE,
|
||||
} from '../../../common/constants';
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
* Used for the Reporting Diagnostic
|
||||
*/
|
||||
export class PngV1ExportType extends ExportType<JobParamsPNGDeprecated, TaskPayloadPNG> {
|
||||
id = 'png';
|
||||
name = 'PNG';
|
||||
jobType = PNG_JOB_TYPE;
|
||||
jobContentEncoding? = 'base64' as const;
|
||||
jobContentExtension = 'png' as const;
|
||||
validLicenses: LicenseType[] = [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
];
|
||||
|
||||
constructor(...args: ConstructorParameters<typeof ExportType>) {
|
||||
super(...args);
|
||||
this.logger = this.logger.get('png-export-v1');
|
||||
}
|
||||
|
||||
public createJob = async (jobParams: JobParamsPNGDeprecated) => {
|
||||
validateUrls([jobParams.relativeUrl]);
|
||||
return {
|
||||
...jobParams,
|
||||
isDeprecated: true,
|
||||
forceNow: new Date().toISOString(),
|
||||
};
|
||||
};
|
||||
|
||||
public runTask = (
|
||||
jobId: string,
|
||||
job: TaskPayloadPNG,
|
||||
cancellationToken: CancellationToken,
|
||||
stream: Writable
|
||||
) => {
|
||||
const apmTrans = apm.startTransaction('execute-job-png', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePng: { end: () => void } | null | undefined;
|
||||
const { encryptionKey } = this.config;
|
||||
const jobLogger = this.logger.get(`execute:${jobId}`);
|
||||
|
||||
const process$: Observable<TaskRunResult> = of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(encryptionKey, job.headers, jobLogger)),
|
||||
mergeMap((headers) => {
|
||||
const [url] = getFullUrls(this.getServerInfo(), this.config, job);
|
||||
|
||||
apmGetAssets?.end();
|
||||
apmGeneratePng = apmTrans?.startSpan('generate-png-pipeline', 'execute');
|
||||
return generatePngObservable(
|
||||
() =>
|
||||
this.startDeps.reporting.getScreenshots({
|
||||
headers,
|
||||
urls: [url],
|
||||
browserTimezone: job.browserTimezone,
|
||||
layout: {
|
||||
...job.layout,
|
||||
id: 'preserve_layout',
|
||||
},
|
||||
}),
|
||||
jobLogger,
|
||||
{
|
||||
headers,
|
||||
urls: [url],
|
||||
browserTimezone: job.browserTimezone,
|
||||
layout: {
|
||||
...job.layout,
|
||||
id: 'preserve_layout',
|
||||
},
|
||||
}
|
||||
);
|
||||
}),
|
||||
tap(({ buffer }) => stream.write(buffer)),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'image/png',
|
||||
metrics: { png: metrics },
|
||||
warnings,
|
||||
})),
|
||||
tap({ error: (error) => jobLogger.error(error) }),
|
||||
finalize(() => apmGeneratePng?.end())
|
||||
);
|
||||
|
||||
const stop$ = fromEventPattern(cancellationToken.on);
|
||||
return lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
}
|
|
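The new `PngV1ExportType.runTask` above pipes decrypt headers → resolve full URL → screenshot → write buffer, and races the whole observable against the job's `CancellationToken`. A stripped-down sketch of just that cancellation wiring, with the token interface reduced to the single method used here (in this simplified form, cancellation before any emission surfaces as a rejected promise):

```ts
// Simplified sketch of the takeUntil-based cancellation used by runTask above.
// Only rxjs operators are real; the other names are illustrative.
import { fromEventPattern, lastValueFrom, mergeMap, of, takeUntil } from 'rxjs';

interface CancellationTokenLike {
  on: (handler: () => void) => void;
}

async function runWithCancellation<T>(
  work: () => Promise<T>,
  cancellationToken: CancellationTokenLike
): Promise<T> {
  // the job pipeline: a single async unit of work lifted into an observable
  const process$ = of(1).pipe(mergeMap(() => work()));
  // an observable that emits once the cancellation token fires
  const stop$ = fromEventPattern(cancellationToken.on);
  // the pipeline stops as soon as stop$ emits; otherwise the last value resolves the promise
  return lastValueFrom(process$.pipe(takeUntil(stop$)));
}
```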
@@ -1,20 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { CreateJobFn, CreateJobFnFactory } from '../../types';
import { JobParamsPNGV2, TaskPayloadPNGV2 } from './types';

export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<JobParamsPNGV2, TaskPayloadPNGV2>> =
  function createJobFactoryFn() {
    return async function createJob({ locatorParams, ...jobParams }) {
      return {
        ...jobParams,
        locatorParams: [locatorParams],
        forceNow: new Date().toISOString(),
      };
    };
  };
@ -1,70 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { TaskRunResult } from '@kbn/reporting-common';
|
||||
import apm from 'elastic-apm-node';
|
||||
import * as Rx from 'rxjs';
|
||||
import { finalize, map, mergeMap, takeUntil, tap } from 'rxjs/operators';
|
||||
import { REPORTING_TRANSACTION_TYPE } from '../../../common/constants';
|
||||
import { RunTaskFn, RunTaskFnFactory } from '../../types';
|
||||
import { decryptJobHeaders, generatePngObservable } from '../common';
|
||||
import { getFullRedirectAppUrl } from '../common/v2/get_full_redirect_app_url';
|
||||
import { TaskPayloadPNGV2 } from './types';
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<TaskPayloadPNGV2>> =
|
||||
function executeJobFactoryFn(reporting, parentLogger) {
|
||||
const { encryptionKey } = reporting.getConfig();
|
||||
|
||||
return function runTask(jobId, job, cancellationToken, stream) {
|
||||
const apmTrans = apm.startTransaction('execute-job-png-v2', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePng: { end: () => void } | null | undefined;
|
||||
|
||||
const jobLogger = parentLogger.get(`execute:${jobId}`);
|
||||
const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(encryptionKey, job.headers, jobLogger)),
|
||||
mergeMap((headers) => {
|
||||
const url = getFullRedirectAppUrl(
|
||||
reporting.getConfig(),
|
||||
reporting.getServerInfo(),
|
||||
job.spaceId,
|
||||
job.forceNow
|
||||
);
|
||||
const [locatorParams] = job.locatorParams;
|
||||
|
||||
apmGetAssets?.end();
|
||||
apmGeneratePng = apmTrans?.startSpan('generate-png-pipeline', 'execute');
|
||||
|
||||
const screenshotFn = () =>
|
||||
reporting.getScreenshots({
|
||||
headers,
|
||||
browserTimezone: job.browserTimezone,
|
||||
layout: { ...job.layout, id: 'preserve_layout' },
|
||||
urls: [[url, locatorParams]],
|
||||
});
|
||||
|
||||
return generatePngObservable(screenshotFn, jobLogger, {
|
||||
headers,
|
||||
browserTimezone: job.browserTimezone,
|
||||
layout: { ...job.layout, id: 'preserve_layout' },
|
||||
urls: [[url, locatorParams]],
|
||||
});
|
||||
}),
|
||||
tap(({ buffer }) => stream.write(buffer)),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'image/png',
|
||||
metrics: { png: metrics },
|
||||
warnings,
|
||||
})),
|
||||
tap({ error: (error) => jobLogger.error(error) }),
|
||||
finalize(() => apmGeneratePng?.end())
|
||||
);
|
||||
|
||||
const stop$ = Rx.fromEventPattern(cancellationToken.on);
|
||||
return Rx.lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
};
|
|
@@ -5,35 +5,4 @@
 * 2.0.
 */

import {
  LICENSE_TYPE_ENTERPRISE,
  LICENSE_TYPE_GOLD,
  LICENSE_TYPE_PLATINUM,
  LICENSE_TYPE_CLOUD_STANDARD,
  LICENSE_TYPE_TRIAL,
  PNG_JOB_TYPE_V2 as jobType,
} from '../../../common/constants';
import { CreateJobFn, ExportTypeDefinition, RunTaskFn } from '../../types';
import { createJobFnFactory } from './create_job';
import { runTaskFnFactory } from './execute_job';
import { metadata } from './metadata';
import { JobParamsPNGV2, TaskPayloadPNGV2 } from './types';

export const getExportType = (): ExportTypeDefinition<
  CreateJobFn<JobParamsPNGV2>,
  RunTaskFn<TaskPayloadPNGV2>
> => ({
  ...metadata,
  jobType,
  jobContentEncoding: 'base64',
  jobContentExtension: 'PNG',
  createJobFnFactory,
  runTaskFnFactory,
  validLicenses: [
    LICENSE_TYPE_TRIAL,
    LICENSE_TYPE_CLOUD_STANDARD,
    LICENSE_TYPE_GOLD,
    LICENSE_TYPE_PLATINUM,
    LICENSE_TYPE_ENTERPRISE,
  ],
});
export { PngExportType } from './png_v2';
@ -6,28 +6,30 @@
|
|||
*/
|
||||
|
||||
import * as Rx from 'rxjs';
|
||||
import { loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { coreMock, elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { Writable } from 'stream';
|
||||
import { ReportingCore } from '../..';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';
|
||||
import { ReportingCore } from '../..';
|
||||
import { LocatorParams } from '../../../common/types';
|
||||
import { cryptoFactory } from '../../lib';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
import { generatePngObservable } from '../common';
|
||||
import { runTaskFnFactory } from './execute_job';
|
||||
import { TaskPayloadPNGV2 } from './types';
|
||||
import { PngExportType } from './png_v2';
|
||||
|
||||
jest.mock('../common/generate_png');
|
||||
|
||||
let content: string;
|
||||
let mockReporting: ReportingCore;
|
||||
let mockReportingCore: ReportingCore;
|
||||
let mockPngExportType: PngExportType;
|
||||
let stream: jest.Mocked<Writable>;
|
||||
|
||||
const cancellationToken = {
|
||||
on: jest.fn(),
|
||||
} as unknown as CancellationToken;
|
||||
|
||||
const getMockLogger = () => loggingSystemMock.createLogger();
|
||||
const mockLogger = loggingSystemMock.createLogger();
|
||||
|
||||
const mockEncryptionKey = 'abcabcsecuresecret';
|
||||
const encryptHeaders = async (headers: Record<string, string>) => {
|
||||
|
@ -41,7 +43,7 @@ beforeEach(async () => {
|
|||
content = '';
|
||||
stream = { write: jest.fn((chunk) => (content += chunk)) } as unknown as typeof stream;
|
||||
|
||||
const mockReportingConfig = createMockConfigSchema({
|
||||
const configType = createMockConfigSchema({
|
||||
encryptionKey: mockEncryptionKey,
|
||||
queue: {
|
||||
indexInterval: 'daily',
|
||||
|
@ -49,7 +51,23 @@ beforeEach(async () => {
|
|||
},
|
||||
});
|
||||
|
||||
mockReporting = await createMockReportingCore(mockReportingConfig);
|
||||
mockReportingCore = await createMockReportingCore(configType);
|
||||
const context = coreMock.createPluginInitializerContext(configType);
|
||||
|
||||
const mockCoreSetup = coreMock.createSetup();
|
||||
const mockCoreStart = coreMock.createStart();
|
||||
|
||||
mockPngExportType = new PngExportType(mockCoreSetup, configType, mockLogger, context);
|
||||
mockPngExportType.setup({
|
||||
basePath: { set: jest.fn() },
|
||||
});
|
||||
mockPngExportType.start({
|
||||
savedObjects: mockCoreStart.savedObjects,
|
||||
uiSettings: mockCoreStart.uiSettings,
|
||||
screenshotting: {} as unknown as ScreenshottingStart,
|
||||
esClient: elasticsearchServiceMock.createClusterClient(),
|
||||
reporting: mockReportingCore.getContract(),
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => (generatePngObservable as jest.Mock).mockReset());
|
||||
|
@ -58,9 +76,8 @@ test(`passes browserTimezone to generatePng`, async () => {
|
|||
const encryptedHeaders = await encryptHeaders({});
|
||||
(generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('') }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const browserTimezone = 'UTC';
|
||||
await runTask(
|
||||
await mockPngExportType.runTask(
|
||||
'pngJobId',
|
||||
getBasePayload({
|
||||
forceNow: 'test',
|
||||
|
@ -89,12 +106,11 @@ test(`passes browserTimezone to generatePng`, async () => {
|
|||
});
|
||||
|
||||
test(`returns content_type of image/png`, async () => {
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
|
||||
(generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('foo') }));
|
||||
|
||||
const { content_type: contentType } = await runTask(
|
||||
const { content_type: contentType } = await mockPngExportType.runTask(
|
||||
'pngJobId',
|
||||
getBasePayload({
|
||||
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
|
||||
|
@ -110,9 +126,8 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => {
|
|||
const testContent = 'raw string from get_screenshots';
|
||||
(generatePngObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
await runTask(
|
||||
await mockPngExportType.runTask(
|
||||
'pngJobId',
|
||||
getBasePayload({
|
||||
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
|
x-pack/plugins/reporting/server/export_types/png_v2/png_v2.ts (new file, 135 lines)
@ -0,0 +1,135 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
import apm from 'elastic-apm-node';
|
||||
import { LicenseType } from '@kbn/licensing-plugin/server';
|
||||
import { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
|
||||
import { Writable } from 'stream';
|
||||
import {
|
||||
finalize,
|
||||
fromEventPattern,
|
||||
lastValueFrom,
|
||||
map,
|
||||
mergeMap,
|
||||
Observable,
|
||||
of,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs';
|
||||
import { SerializableRecord } from '@kbn/utility-types';
|
||||
import { LocatorParams } from '../../../common';
|
||||
import {
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
PNG_JOB_TYPE_V2,
|
||||
PNG_REPORT_TYPE_V2,
|
||||
REPORTING_TRANSACTION_TYPE,
|
||||
} from '../../../common/constants';
|
||||
import { decryptJobHeaders, ExportType, generatePngObservable } from '../common';
|
||||
import { JobParamsPNGV2, TaskPayloadPNGV2 } from './types';
|
||||
import { getFullRedirectAppUrl } from '../common/v2/get_full_redirect_app_url';
|
||||
|
||||
export class PngExportType extends ExportType<JobParamsPNGV2, TaskPayloadPNGV2> {
|
||||
id = PNG_REPORT_TYPE_V2;
|
||||
name = 'PNG';
|
||||
jobType = PNG_JOB_TYPE_V2;
|
||||
jobContentEncoding = 'base64' as const;
|
||||
jobContentExtension = 'png' as const;
|
||||
validLicenses: LicenseType[] = [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
];
|
||||
|
||||
constructor(...args: ConstructorParameters<typeof ExportType>) {
|
||||
super(...args);
|
||||
this.logger = this.logger.get('png-export-v2');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param jobParams JobParamsPNGV2
|
||||
* @returns jobParams
|
||||
*/
|
||||
public createJob = async ({ locatorParams, ...jobParams }: JobParamsPNGV2) => {
|
||||
return {
|
||||
...jobParams,
|
||||
locatorParams: [locatorParams] as unknown as LocatorParams<SerializableRecord>,
|
||||
isDeprecated: false,
|
||||
browserTimezone: jobParams.browserTimezone,
|
||||
forceNow: new Date().toISOString(),
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
*
|
||||
* @param jobId
|
||||
* @param payload
|
||||
* @param cancellationToken
|
||||
* @param stream
|
||||
*/
|
||||
public runTask = (
|
||||
jobId: string,
|
||||
payload: TaskPayloadPNGV2,
|
||||
cancellationToken: CancellationToken,
|
||||
stream: Writable
|
||||
) => {
|
||||
const jobLogger = this.logger.get(`execute-job:${jobId}`);
|
||||
const apmTrans = apm.startTransaction('execute-job-png-v2', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePng: { end: () => void } | null | undefined;
|
||||
const { encryptionKey } = this.config;
|
||||
|
||||
const process$: Observable<TaskRunResult> = of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(encryptionKey, payload.headers, jobLogger)),
|
||||
mergeMap((headers) => {
|
||||
const url = getFullRedirectAppUrl(
|
||||
this.config,
|
||||
this.getServerInfo(),
|
||||
payload.spaceId,
|
||||
payload.forceNow
|
||||
);
|
||||
|
||||
const [locatorParams] = payload.locatorParams;
|
||||
|
||||
apmGetAssets?.end();
|
||||
apmGeneratePng = apmTrans?.startSpan('generate-png-pipeline', 'execute');
|
||||
|
||||
return generatePngObservable(
|
||||
() =>
|
||||
this.startDeps.reporting.getScreenshots({
|
||||
format: 'png',
|
||||
headers,
|
||||
layout: { ...payload.layout, id: 'preserve_layout' },
|
||||
urls: [[url, locatorParams]],
|
||||
}),
|
||||
jobLogger,
|
||||
{
|
||||
headers,
|
||||
browserTimezone: payload.browserTimezone,
|
||||
layout: { ...payload.layout, id: 'preserve_layout' },
|
||||
urls: [[url, locatorParams]],
|
||||
}
|
||||
);
|
||||
}),
|
||||
tap(({ buffer }) => stream.write(buffer)),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'image/png',
|
||||
metrics: { png: metrics },
|
||||
warnings,
|
||||
})),
|
||||
tap({ error: (error) => jobLogger.error(error) }),
|
||||
finalize(() => apmGeneratePng?.end())
|
||||
);
|
||||
|
||||
const stop$ = fromEventPattern(cancellationToken.on);
|
||||
return lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
}
|
|
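`PngExportType.createJob` stores the single locator as a one-element `locatorParams` array, and `runTask` later pairs `locatorParams[0]` with the redirect app URL before screenshotting. A hedged sketch of that pairing; the URL builder below is a stand-in for `getFullRedirectAppUrl`, whose real output format is not shown in this diff:

```ts
// Illustrative only: how the PNG v2 task pairs the redirect app URL with the locator stored
// at createJob time. getFullRedirectAppUrlSketch is a placeholder, not the real helper.
type LocatorParams = { id: string; version: string; params: Record<string, unknown> };
type UrlOrUrlLocatorTuple = string | [url: string, locator: LocatorParams];

const getFullRedirectAppUrlSketch = (spaceId?: string, forceNow?: string): string => {
  // assumed URL shape, for illustration
  const base = spaceId ? `/s/${spaceId}/app/reportingRedirect` : '/app/reportingRedirect';
  return forceNow ? `${base}?forceNow=${encodeURIComponent(forceNow)}` : base;
};

function buildScreenshotUrls(payload: {
  locatorParams: LocatorParams[];
  spaceId?: string;
  forceNow?: string;
}): UrlOrUrlLocatorTuple[] {
  const url = getFullRedirectAppUrlSketch(payload.spaceId, payload.forceNow);
  // createJob stored a single locator as a one-element array
  const [locatorParams] = payload.locatorParams;
  return [[url, locatorParams]];
}
```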
@@ -1,28 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { CreateJobFn, CreateJobFnFactory } from '../../../types';
import { validateUrls } from '../../common';
import { JobParamsPDFDeprecated, TaskPayloadPDF } from '../types';

export const createJobFnFactory: CreateJobFnFactory<
  CreateJobFn<JobParamsPDFDeprecated, TaskPayloadPDF>
> = function createJobFactoryFn() {
  return async function createJobFn(
    { relativeUrls, ...jobParams }: JobParamsPDFDeprecated // relativeUrls does not belong in the payload of PDFV1
  ) {
    validateUrls(relativeUrls);

    // return the payload
    return {
      ...jobParams,
      isDeprecated: true,
      forceNow: new Date().toISOString(),
      objects: relativeUrls.map((u) => ({ relativeUrl: u })),
    };
  };
};
@ -1,85 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { TaskRunResult } from '@kbn/reporting-common';
|
||||
import apm from 'elastic-apm-node';
|
||||
import * as Rx from 'rxjs';
|
||||
import { catchError, map, mergeMap, takeUntil, tap } from 'rxjs/operators';
|
||||
import { REPORTING_TRANSACTION_TYPE } from '../../../../common/constants';
|
||||
import { RunTaskFn, RunTaskFnFactory } from '../../../types';
|
||||
import { decryptJobHeaders, getCustomLogo, getFullUrls } from '../../common';
|
||||
import { generatePdfObservable } from '../lib/generate_pdf';
|
||||
import { TaskPayloadPDF } from '../types';
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<TaskPayloadPDF>> =
|
||||
function executeJobFactoryFn(reporting, parentLogger) {
|
||||
const { encryptionKey } = reporting.getConfig();
|
||||
|
||||
return async function runTask(jobId, job, cancellationToken, stream) {
|
||||
const jobLogger = parentLogger.get(`execute-job:${jobId}`);
|
||||
const apmTrans = apm.startTransaction('execute-job-pdf', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePdf: { end: () => void } | null | undefined;
|
||||
|
||||
const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(encryptionKey, job.headers, jobLogger)),
|
||||
mergeMap(async (headers) => {
|
||||
const fakeRequest = reporting.getFakeRequest(headers, job.spaceId, jobLogger);
|
||||
const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest);
|
||||
return getCustomLogo(uiSettingsClient, headers);
|
||||
}),
|
||||
mergeMap(({ headers, logo }) => {
|
||||
const urls = getFullUrls(reporting.getServerInfo(), reporting.getConfig(), job);
|
||||
|
||||
const { browserTimezone, layout, title } = job;
|
||||
apmGetAssets?.end();
|
||||
|
||||
apmGeneratePdf = apmTrans?.startSpan('generate-pdf-pipeline', 'execute');
|
||||
// make a new function that will call reporting.getScreenshots
|
||||
const snapshotFn = () =>
|
||||
reporting.getScreenshots({
|
||||
format: 'pdf',
|
||||
title,
|
||||
logo,
|
||||
urls,
|
||||
browserTimezone,
|
||||
headers,
|
||||
layout,
|
||||
});
|
||||
return generatePdfObservable(snapshotFn, {
|
||||
format: 'pdf',
|
||||
title,
|
||||
logo,
|
||||
urls,
|
||||
browserTimezone,
|
||||
headers,
|
||||
layout,
|
||||
});
|
||||
}),
|
||||
tap(({ buffer }) => {
|
||||
apmGeneratePdf?.end();
|
||||
if (buffer) {
|
||||
stream.write(buffer);
|
||||
}
|
||||
}),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'application/pdf',
|
||||
metrics: { pdf: metrics },
|
||||
warnings,
|
||||
})),
|
||||
catchError((err) => {
|
||||
jobLogger.error(err);
|
||||
return Rx.throwError(err);
|
||||
})
|
||||
);
|
||||
|
||||
const stop$ = Rx.fromEventPattern(cancellationToken.on);
|
||||
|
||||
apmTrans?.end();
|
||||
return Rx.lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
};
|
|
@@ -5,35 +5,4 @@
 * 2.0.
 */

import {
  LICENSE_TYPE_ENTERPRISE,
  LICENSE_TYPE_GOLD,
  LICENSE_TYPE_PLATINUM,
  LICENSE_TYPE_CLOUD_STANDARD,
  LICENSE_TYPE_TRIAL,
  PDF_JOB_TYPE as jobType,
} from '../../../common/constants';
import { CreateJobFn, ExportTypeDefinition, RunTaskFn } from '../../types';
import { createJobFnFactory } from './create_job';
import { runTaskFnFactory } from './execute_job';
import { metadata } from './metadata';
import { JobParamsPDFDeprecated, TaskPayloadPDF } from './types';

export const getExportType = (): ExportTypeDefinition<
  CreateJobFn<JobParamsPDFDeprecated>,
  RunTaskFn<TaskPayloadPDF>
> => ({
  ...metadata,
  jobType,
  jobContentEncoding: 'base64',
  jobContentExtension: 'pdf',
  createJobFnFactory,
  runTaskFnFactory,
  validLicenses: [
    LICENSE_TYPE_TRIAL,
    LICENSE_TYPE_CLOUD_STANDARD,
    LICENSE_TYPE_GOLD,
    LICENSE_TYPE_PLATINUM,
    LICENSE_TYPE_ENTERPRISE,
  ],
});
export { PdfV1ExportType } from './printable_pdf';
@@ -1,11 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const metadata = {
  id: 'printablePdf',
  name: 'PDF',
};
@ -6,27 +6,27 @@
|
|||
*/
|
||||
|
||||
import * as Rx from 'rxjs';
|
||||
import { loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { coreMock, elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { Writable } from 'stream';
|
||||
import { ReportingCore } from '../../..';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { cryptoFactory } from '../../../lib';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../../test_helpers';
|
||||
import { generatePdfObservable } from '../lib/generate_pdf';
|
||||
import { TaskPayloadPDF } from '../types';
|
||||
import { runTaskFnFactory } from '.';
|
||||
import { generatePdfObservable } from './lib/generate_pdf';
|
||||
import { cryptoFactory } from '../../lib/crypto';
|
||||
import { TaskPayloadPDF } from './types';
|
||||
import { PdfV1ExportType } from './printable_pdf';
|
||||
import { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
|
||||
jest.mock('../lib/generate_pdf');
|
||||
jest.mock('./lib/generate_pdf');
|
||||
|
||||
let content: string;
|
||||
let mockReporting: ReportingCore;
|
||||
let mockPdfExportType: PdfV1ExportType;
|
||||
let stream: jest.Mocked<Writable>;
|
||||
|
||||
const cancellationToken = {
|
||||
on: jest.fn(),
|
||||
} as unknown as CancellationToken;
|
||||
|
||||
const getMockLogger = () => loggingSystemMock.createLogger();
|
||||
const mockLogger = loggingSystemMock.createLogger();
|
||||
|
||||
const mockEncryptionKey = 'testencryptionkey';
|
||||
const encryptHeaders = async (headers: Record<string, string>) => {
|
||||
|
@ -39,17 +39,25 @@ const getBasePayload = (baseObj: any) => baseObj as TaskPayloadPDF;
|
|||
beforeEach(async () => {
|
||||
content = '';
|
||||
stream = { write: jest.fn((chunk) => (content += chunk)) } as unknown as typeof stream;
|
||||
const configType = createMockConfigSchema({ encryptionKey: mockEncryptionKey });
|
||||
const context = coreMock.createPluginInitializerContext(configType);
|
||||
|
||||
const reportingConfig = {
|
||||
'server.basePath': '/sbp',
|
||||
index: '.reports-test',
|
||||
encryptionKey: mockEncryptionKey,
|
||||
'kibanaServer.hostname': 'localhost',
|
||||
'kibanaServer.port': 5601,
|
||||
'kibanaServer.protocol': 'http',
|
||||
};
|
||||
const mockSchema = createMockConfigSchema(reportingConfig);
|
||||
mockReporting = await createMockReportingCore(mockSchema);
|
||||
const mockCoreSetup = coreMock.createSetup();
|
||||
const mockCoreStart = coreMock.createStart();
|
||||
const mockReportingCore = await createMockReportingCore(createMockConfigSchema());
|
||||
|
||||
mockPdfExportType = new PdfV1ExportType(mockCoreSetup, configType, mockLogger, context);
|
||||
|
||||
mockPdfExportType.setup({
|
||||
basePath: { set: jest.fn() },
|
||||
});
|
||||
mockPdfExportType.start({
|
||||
esClient: elasticsearchServiceMock.createClusterClient(),
|
||||
savedObjects: mockCoreStart.savedObjects,
|
||||
uiSettings: mockCoreStart.uiSettings,
|
||||
screenshotting: {} as unknown as ScreenshottingStart,
|
||||
reporting: mockReportingCore.getContract(),
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => (generatePdfObservable as jest.Mock).mockReset());
|
||||
|
@ -58,9 +66,8 @@ test(`passes browserTimezone to generatePdf`, async () => {
|
|||
const encryptedHeaders = await encryptHeaders({});
|
||||
(generatePdfObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('') }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const browserTimezone = 'UTC';
|
||||
await runTask(
|
||||
await mockPdfExportType.runTask(
|
||||
'pdfJobId',
|
||||
getBasePayload({
|
||||
title: 'PDF Params Timezone Test',
|
||||
|
@ -79,13 +86,11 @@ test(`passes browserTimezone to generatePdf`, async () => {
|
|||
});
|
||||
|
||||
test(`returns content_type of application/pdf`, async () => {
|
||||
const logger = getMockLogger();
|
||||
const runTask = runTaskFnFactory(mockReporting, logger);
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
|
||||
(generatePdfObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('') }));
|
||||
|
||||
const { content_type: contentType } = await runTask(
|
||||
const { content_type: contentType } = await mockPdfExportType.runTask(
|
||||
'pdfJobId',
|
||||
getBasePayload({ objects: [], headers: encryptedHeaders }),
|
||||
cancellationToken,
|
||||
|
@ -98,9 +103,8 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
|
|||
const testContent = 'test content';
|
||||
(generatePdfObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
await runTask(
|
||||
await mockPdfExportType.runTask(
|
||||
'pdfJobId',
|
||||
getBasePayload({ objects: [], headers: encryptedHeaders }),
|
||||
cancellationToken,
|
|
@ -0,0 +1,129 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { LicenseType } from '@kbn/licensing-plugin/server';
|
||||
import { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
|
||||
import { Writable } from 'stream';
|
||||
import apm from 'elastic-apm-node';
|
||||
import { catchError, map, mergeMap, takeUntil, tap } from 'rxjs/operators';
|
||||
import { fromEventPattern, lastValueFrom, Observable, of, throwError } from 'rxjs';
|
||||
import { JobParamsPDFDeprecated } from '../../../common/types';
|
||||
import {
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_TRIAL,
|
||||
PDF_JOB_TYPE,
|
||||
REPORTING_TRANSACTION_TYPE,
|
||||
} from '../../../common/constants';
|
||||
import { decryptJobHeaders, ExportType, getCustomLogo, getFullUrls, validateUrls } from '../common';
|
||||
import { TaskPayloadPDF } from './types';
|
||||
import { generatePdfObservable } from './lib/generate_pdf';
|
||||
|
||||
export class PdfV1ExportType extends ExportType<JobParamsPDFDeprecated, TaskPayloadPDF> {
|
||||
id = 'printablePdf';
|
||||
name = 'PDF';
|
||||
jobType = PDF_JOB_TYPE;
|
||||
jobContentEncoding? = 'base64' as const;
|
||||
jobContentExtension = 'pdf' as const;
|
||||
validLicenses: LicenseType[] = [
|
||||
LICENSE_TYPE_TRIAL,
|
||||
LICENSE_TYPE_CLOUD_STANDARD,
|
||||
LICENSE_TYPE_GOLD,
|
||||
LICENSE_TYPE_PLATINUM,
|
||||
LICENSE_TYPE_ENTERPRISE,
|
||||
];
|
||||
|
||||
constructor(...args: ConstructorParameters<typeof ExportType>) {
|
||||
super(...args);
|
||||
this.logger = this.logger.get('pdf-export-v1');
|
||||
}
|
||||
|
||||
public createJob = async (
|
||||
{ relativeUrls, ...jobParams }: JobParamsPDFDeprecated // relativeUrls does not belong in the payload of PDFV1
|
||||
) => {
|
||||
validateUrls(relativeUrls);
|
||||
|
||||
// return the payload
|
||||
return {
|
||||
...jobParams,
|
||||
isDeprecated: true,
|
||||
forceNow: new Date().toISOString(),
|
||||
objects: relativeUrls.map((u) => ({ relativeUrl: u })),
|
||||
};
|
||||
};
|
||||
|
||||
public runTask = async (
|
||||
jobId: string,
|
||||
job: TaskPayloadPDF,
|
||||
cancellationToken: CancellationToken,
|
||||
stream: Writable
|
||||
) => {
|
||||
const jobLogger = this.logger.get(`execute-job:${jobId}`);
|
||||
const apmTrans = apm.startTransaction('execute-job-pdf', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePdf: { end: () => void } | null | undefined;
|
||||
|
||||
const process$: Observable<TaskRunResult> = of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(this.config.encryptionKey, job.headers, jobLogger)),
|
||||
mergeMap(async (headers) => {
|
||||
const fakeRequest = this.getFakeRequest(headers, job.spaceId, jobLogger);
|
||||
const uiSettingsClient = await this.getUiSettingsClient(fakeRequest);
|
||||
return getCustomLogo(uiSettingsClient, headers);
|
||||
}),
|
||||
mergeMap(({ headers, logo }) => {
|
||||
const urls = getFullUrls(this.getServerInfo(), this.config, job);
|
||||
|
||||
const { browserTimezone, layout, title } = job;
|
||||
apmGetAssets?.end();
|
||||
|
||||
apmGeneratePdf = apmTrans?.startSpan('generate-pdf-pipeline', 'execute');
|
||||
// make a new function that will call reporting.getScreenshots
|
||||
const snapshotFn = () =>
|
||||
this.startDeps.reporting.getScreenshots({
|
||||
format: 'pdf',
|
||||
title,
|
||||
logo,
|
||||
urls,
|
||||
browserTimezone,
|
||||
headers,
|
||||
layout,
|
||||
});
|
||||
return generatePdfObservable(snapshotFn, {
|
||||
format: 'pdf',
|
||||
title,
|
||||
logo,
|
||||
urls,
|
||||
browserTimezone,
|
||||
headers,
|
||||
layout,
|
||||
});
|
||||
}),
|
||||
tap(({ buffer }) => {
|
||||
apmGeneratePdf?.end();
|
||||
if (buffer) {
|
||||
stream.write(buffer);
|
||||
}
|
||||
}),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'application/pdf',
|
||||
metrics: { pdf: metrics },
|
||||
warnings,
|
||||
})),
|
||||
catchError((err: any) => {
|
||||
jobLogger.error(err);
|
||||
return throwError(err);
|
||||
})
|
||||
);
|
||||
|
||||
const stop$ = fromEventPattern(cancellationToken.on);
|
||||
|
||||
apmTrans?.end();
|
||||
return lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
}
|
|
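For the deprecated PDF type above, `runTask` builds a fake request from the decrypted job headers and uses a scoped UI settings client to look up the custom PDF logo before taking screenshots. A hedged, self-contained sketch of just that lookup step; the client shape and setting key are assumptions for illustration, not the actual Kibana uiSettings API surface:

```ts
// Hedged sketch of the custom-logo lookup step in runTask above.
interface UiSettingsClientLike {
  get<T>(key: string): Promise<T>;
}

const PDF_LOGO_SETTING = 'xpackReporting:customPdfLogo'; // assumed key, illustrative only

async function getCustomLogoSketch(
  uiSettingsClient: UiSettingsClientLike,
  headers: Record<string, string>
): Promise<{ headers: Record<string, string>; logo: string | undefined }> {
  const logo = await uiSettingsClient.get<string | undefined>(PDF_LOGO_SETTING);
  // headers are passed through so the next pipeline stage can reuse them for getScreenshots
  return { headers, logo };
}
```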
@@ -1,19 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { CreateJobFn, CreateJobFnFactory } from '../../types';
import { JobParamsPDFV2, TaskPayloadPDFV2 } from './types';

export const createJobFnFactory: CreateJobFnFactory<CreateJobFn<JobParamsPDFV2, TaskPayloadPDFV2>> =
  function createJobFactoryFn() {
    return async function createJob(jobParams) {
      return {
        ...jobParams,
        forceNow: new Date().toISOString(),
      };
    };
  };
@ -1,102 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import apm from 'elastic-apm-node';
|
||||
import * as Rx from 'rxjs';
|
||||
import { catchError, map, mergeMap, takeUntil, tap } from 'rxjs/operators';
|
||||
import { TaskRunResult } from '@kbn/reporting-common';
|
||||
import { getFullRedirectAppUrl } from '../common/v2/get_full_redirect_app_url';
|
||||
import { UrlOrUrlLocatorTuple } from '../../../common/types';
|
||||
import { REPORTING_TRANSACTION_TYPE } from '../../../common/constants';
|
||||
import { RunTaskFn, RunTaskFnFactory } from '../../types';
|
||||
import { decryptJobHeaders, getCustomLogo } from '../common';
|
||||
import { generatePdfObservable } from './lib/generate_pdf';
|
||||
import { TaskPayloadPDFV2 } from './types';
|
||||
|
||||
export const runTaskFnFactory: RunTaskFnFactory<RunTaskFn<TaskPayloadPDFV2>> =
|
||||
function executeJobFactoryFn(reporting, parentLogger) {
|
||||
const { encryptionKey } = reporting.getConfig();
|
||||
|
||||
return async function runTask(jobId, job, cancellationToken, stream) {
|
||||
const jobLogger = parentLogger.get(`execute-job:${jobId}`);
|
||||
const apmTrans = apm.startTransaction('execute-job-pdf-v2', REPORTING_TRANSACTION_TYPE);
|
||||
const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
|
||||
let apmGeneratePdf: { end: () => void } | null | undefined;
|
||||
|
||||
const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
|
||||
mergeMap(() => decryptJobHeaders(encryptionKey, job.headers, jobLogger)),
|
||||
mergeMap(async (headers) => {
|
||||
const fakeRequest = reporting.getFakeRequest(headers, job.spaceId, jobLogger);
|
||||
const uiSettingsClient = await reporting.getUiSettingsClient(fakeRequest);
|
||||
return getCustomLogo(uiSettingsClient, headers);
|
||||
}),
|
||||
mergeMap(({ logo, headers }) => {
|
||||
const { browserTimezone, layout, title, locatorParams } = job;
|
||||
|
||||
const urls = locatorParams.map((locator) => [
|
||||
getFullRedirectAppUrl(
|
||||
reporting.getConfig(),
|
||||
reporting.getServerInfo(),
|
||||
job.spaceId,
|
||||
job.forceNow
|
||||
),
|
||||
locator,
|
||||
]) as UrlOrUrlLocatorTuple[];
|
||||
|
||||
const screenshotFn = () =>
|
||||
reporting.getScreenshots({
|
||||
format: 'pdf',
|
||||
title,
|
||||
logo,
|
||||
browserTimezone,
|
||||
headers,
|
||||
layout,
|
||||
urls,
|
||||
});
|
||||
apmGetAssets?.end();
|
||||
|
||||
apmGeneratePdf = apmTrans?.startSpan('generate-pdf-pipeline', 'execute');
|
||||
return generatePdfObservable(
|
||||
reporting.getConfig(),
|
||||
reporting.getServerInfo(),
|
||||
screenshotFn,
|
||||
job,
|
||||
locatorParams,
|
||||
{
|
||||
format: 'pdf',
|
||||
title,
|
||||
logo,
|
||||
browserTimezone,
|
||||
headers,
|
||||
layout,
|
||||
}
|
||||
);
|
||||
}),
|
||||
tap(({ buffer }) => {
|
||||
apmGeneratePdf?.end();
|
||||
|
||||
if (buffer) {
|
||||
stream.write(buffer);
|
||||
}
|
||||
}),
|
||||
map(({ metrics, warnings }) => ({
|
||||
content_type: 'application/pdf',
|
||||
metrics: { pdf: metrics },
|
||||
warnings,
|
||||
})),
|
||||
catchError((err) => {
|
||||
jobLogger.error(err);
|
||||
return Rx.throwError(err);
|
||||
})
|
||||
);
|
||||
|
||||
const stop$ = Rx.fromEventPattern(cancellationToken.on);
|
||||
|
||||
apmTrans?.end();
|
||||
return Rx.lastValueFrom(process$.pipe(takeUntil(stop$)));
|
||||
};
|
||||
};
|
|
@@ -5,35 +5,4 @@
 * 2.0.
 */

import {
  LICENSE_TYPE_ENTERPRISE,
  LICENSE_TYPE_GOLD,
  LICENSE_TYPE_PLATINUM,
  LICENSE_TYPE_CLOUD_STANDARD,
  LICENSE_TYPE_TRIAL,
  PDF_JOB_TYPE_V2 as jobType,
} from '../../../common/constants';
import { CreateJobFn, ExportTypeDefinition, RunTaskFn } from '../../types';
import { createJobFnFactory } from './create_job';
import { runTaskFnFactory } from './execute_job';
import { metadata } from './metadata';
import { JobParamsPDFV2, TaskPayloadPDFV2 } from './types';

export const getExportType = (): ExportTypeDefinition<
  CreateJobFn<JobParamsPDFV2>,
  RunTaskFn<TaskPayloadPDFV2>
> => ({
  ...metadata,
  jobType,
  jobContentEncoding: 'base64',
  jobContentExtension: 'pdf',
  createJobFnFactory,
  runTaskFnFactory,
  validLicenses: [
    LICENSE_TYPE_TRIAL,
    LICENSE_TYPE_CLOUD_STANDARD,
    LICENSE_TYPE_GOLD,
    LICENSE_TYPE_PLATINUM,
    LICENSE_TYPE_ENTERPRISE,
  ],
});
export { PdfExportType } from './printable_pdf_v2';
@@ -8,13 +8,13 @@
import * as Rx from 'rxjs';
import { mergeMap, tap } from 'rxjs/operators';
import { PdfScreenshotResult } from '@kbn/screenshotting-plugin/server';
import { TaskPayloadPDFV2 } from '../../../../common/types/export_types/printable_pdf_v2';
import { ReportingServerInfo } from '../../../core';
import { ReportingConfigType } from '../../../config';
import type { LocatorParams, PdfMetrics, UrlOrUrlLocatorTuple } from '../../../../common/types';
import type { PdfScreenshotOptions } from '../../../types';
import { getFullRedirectAppUrl } from '../../common/v2/get_full_redirect_app_url';
import { getTracker } from '../../common/pdf_tracker';
import type { TaskPayloadPDFV2 } from '../types';

interface PdfResult {
  buffer: Uint8Array | null;
@@ -1,11 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

export const metadata = {
  id: 'printablePdfV2',
  name: 'PDF',
};
@ -7,27 +7,27 @@
|
|||
|
||||
jest.mock('./lib/generate_pdf');
|
||||
|
||||
import * as Rx from 'rxjs';
|
||||
import { loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { Writable } from 'stream';
|
||||
import { ReportingCore } from '../..';
|
||||
import { coreMock, elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mocks';
|
||||
import { CancellationToken } from '@kbn/reporting-common';
|
||||
import { LocatorParams } from '../../../common/types';
|
||||
import type { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';
|
||||
import * as Rx from 'rxjs';
|
||||
import type { Writable } from 'stream';
|
||||
import { PdfExportType } from '.';
|
||||
import type { LocatorParams } from '../../../common';
|
||||
import type { TaskPayloadPDFV2 } from '../../../common/types/export_types/printable_pdf_v2';
|
||||
import { cryptoFactory } from '../../lib';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
import { runTaskFnFactory } from './execute_job';
|
||||
import { generatePdfObservable } from './lib/generate_pdf';
|
||||
import { TaskPayloadPDFV2 } from './types';
|
||||
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
|
||||
|
||||
let content: string;
|
||||
let mockReporting: ReportingCore;
|
||||
let mockPdfExportType: PdfExportType;
|
||||
let stream: jest.Mocked<Writable>;
|
||||
|
||||
const cancellationToken = {
|
||||
on: jest.fn(),
|
||||
} as unknown as CancellationToken;
|
||||
|
||||
const getMockLogger = () => loggingSystemMock.createLogger();
|
||||
const mockLogger = loggingSystemMock.createLogger();
|
||||
|
||||
const mockEncryptionKey = 'testencryptionkey';
|
||||
const encryptHeaders = async (headers: Record<string, string>) => {
|
||||
|
@ -45,16 +45,25 @@ beforeEach(async () => {
|
|||
content = '';
|
||||
stream = { write: jest.fn((chunk) => (content += chunk)) } as unknown as typeof stream;
|
||||
|
||||
const reportingConfig = {
|
||||
'server.basePath': '/sbp',
|
||||
index: '.reports-test',
|
||||
encryptionKey: mockEncryptionKey,
|
||||
'kibanaServer.hostname': 'localhost',
|
||||
'kibanaServer.port': 5601,
|
||||
'kibanaServer.protocol': 'http',
|
||||
};
|
||||
const mockSchema = createMockConfigSchema(reportingConfig);
|
||||
mockReporting = await createMockReportingCore(mockSchema);
|
||||
const configType = createMockConfigSchema({ encryptionKey: mockEncryptionKey });
|
||||
const context = coreMock.createPluginInitializerContext(configType);
|
||||
|
||||
const mockCoreSetup = coreMock.createSetup();
|
||||
const mockCoreStart = coreMock.createStart();
|
||||
const mockReportingCore = await createMockReportingCore(createMockConfigSchema());
|
||||
|
||||
mockPdfExportType = new PdfExportType(mockCoreSetup, configType, mockLogger, context);
|
||||
|
||||
mockPdfExportType.setup({
|
||||
basePath: { set: jest.fn() },
|
||||
});
|
||||
mockPdfExportType.start({
|
||||
esClient: elasticsearchServiceMock.createClusterClient(),
|
||||
savedObjects: mockCoreStart.savedObjects,
|
||||
uiSettings: mockCoreStart.uiSettings,
|
||||
screenshotting: {} as unknown as ScreenshottingStart,
|
||||
reporting: mockReportingCore.getContract(),
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => (generatePdfObservable as jest.Mock).mockReset());
|
||||
|
@ -63,9 +72,8 @@ test(`passes browserTimezone to generatePdf`, async () => {
|
|||
const encryptedHeaders = await encryptHeaders({});
|
||||
(generatePdfObservable as jest.Mock).mockReturnValue(Rx.of(Buffer.from('')));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const browserTimezone = 'UTC';
|
||||
await runTask(
|
||||
await mockPdfExportType.runTask(
|
||||
'pdfJobId',
|
||||
getBasePayload({
|
||||
forceNow: 'test',
|
||||
|
@ -89,13 +97,11 @@ test(`passes browserTimezone to generatePdf`, async () => {
|
|||
});
|
||||
|
||||
test(`returns content_type of application/pdf`, async () => {
|
||||
const logger = getMockLogger();
|
||||
const runTask = runTaskFnFactory(mockReporting, logger);
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
|
||||
(generatePdfObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from('') }));
|
||||
|
||||
const { content_type: contentType } = await runTask(
|
||||
const { content_type: contentType } = await mockPdfExportType.runTask(
|
||||
'pdfJobId',
|
||||
getBasePayload({ locatorParams: [], headers: encryptedHeaders }),
|
||||
cancellationToken,
|
||||
|
@ -108,9 +114,8 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
|
|||
const testContent = 'test content';
|
||||
(generatePdfObservable as jest.Mock).mockReturnValue(Rx.of({ buffer: Buffer.from(testContent) }));
|
||||
|
||||
const runTask = runTaskFnFactory(mockReporting, getMockLogger());
|
||||
const encryptedHeaders = await encryptHeaders({});
|
||||
await runTask(
|
||||
await mockPdfExportType.runTask(
|
||||
'pdfJobId',
|
||||
getBasePayload({ locatorParams: [], headers: encryptedHeaders }),
|
||||
cancellationToken,
|
|
@@ -0,0 +1,155 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import { Headers } from '@kbn/core/server';
import { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
import apm from 'elastic-apm-node';
import * as Rx from 'rxjs';
import { catchError, map, mergeMap, takeUntil, tap } from 'rxjs';
import { Writable } from 'stream';
import {
  LICENSE_TYPE_CLOUD_STANDARD,
  LICENSE_TYPE_ENTERPRISE,
  LICENSE_TYPE_GOLD,
  LICENSE_TYPE_PLATINUM,
  LICENSE_TYPE_TRIAL,
  PDF_JOB_TYPE_V2,
  PDF_REPORT_TYPE_V2,
  REPORTING_TRANSACTION_TYPE,
} from '../../../common/constants';
import { JobParamsPDFV2, UrlOrUrlLocatorTuple } from '../../../common/types';
import { TaskPayloadPDFV2 } from '../../../common/types/export_types/printable_pdf_v2';
import { decryptJobHeaders, ExportType, getCustomLogo } from '../common';
import { getFullRedirectAppUrl } from '../common/v2/get_full_redirect_app_url';
import { generatePdfObservable } from './lib/generate_pdf';

export class PdfExportType extends ExportType<JobParamsPDFV2, TaskPayloadPDFV2> {
  id = PDF_REPORT_TYPE_V2;
  name = 'PDF';
  jobType = PDF_JOB_TYPE_V2;
  jobContentEncoding = 'base64' as const;
  jobContentExtension = 'pdf' as const;
  validLicenses = [
    LICENSE_TYPE_TRIAL,
    LICENSE_TYPE_CLOUD_STANDARD,
    LICENSE_TYPE_GOLD,
    LICENSE_TYPE_PLATINUM,
    LICENSE_TYPE_ENTERPRISE,
  ];

  constructor(...args: ConstructorParameters<typeof ExportType>) {
    super(...args);
    this.logger = this.logger.get('pdf-export-v2');
  }

  /**
   * @param JobParamsPDFV2
   * @returns jobParams
   */
  public createJob = async ({ locatorParams, ...jobParams }: JobParamsPDFV2) => {
    return {
      ...jobParams,
      locatorParams,
      isDeprecated: false,
      browserTimezone: jobParams.browserTimezone,
      forceNow: new Date().toISOString(),
    };
  };

  /**
   *
   * @param jobId
   * @param payload
   * @param cancellationToken
   * @param stream
   */
  public runTask = (
    jobId: string,
    payload: TaskPayloadPDFV2,
    cancellationToken: CancellationToken,
    stream: Writable
  ) => {
    const jobLogger = this.logger.get(`execute-job:${jobId}`);
    const apmTrans = apm.startTransaction('execute-job-pdf-v2', REPORTING_TRANSACTION_TYPE);
    const apmGetAssets = apmTrans?.startSpan('get-assets', 'setup');
    let apmGeneratePdf: { end: () => void } | null | undefined;
    const { encryptionKey } = this.config;

    const process$: Rx.Observable<TaskRunResult> = Rx.of(1).pipe(
      mergeMap(() => decryptJobHeaders(encryptionKey, payload.headers, jobLogger)),
      mergeMap(async (headers: Headers) => {
        const fakeRequest = this.getFakeRequest(headers, payload.spaceId, jobLogger);
        const uiSettingsClient = await this.getUiSettingsClient(fakeRequest);
        return await getCustomLogo(uiSettingsClient, headers);
      }),
      mergeMap(({ logo, headers }) => {
        const { browserTimezone, layout, title, locatorParams } = payload;
        let urls: UrlOrUrlLocatorTuple[];
        if (locatorParams) {
          urls = locatorParams.map((locator) => [
            getFullRedirectAppUrl(
              this.config,
              this.getServerInfo(),
              payload.spaceId,
              payload.forceNow
            ),
            locator,
          ]) as unknown as UrlOrUrlLocatorTuple[];
        }

        apmGetAssets?.end();

        apmGeneratePdf = apmTrans?.startSpan('generate-pdf-pipeline', 'execute');
        return generatePdfObservable(
          this.config,
          this.getServerInfo(),
          () =>
            this.startDeps.reporting.getScreenshots({
              format: 'pdf',
              title,
              logo,
              browserTimezone,
              headers,
              layout,
              urls,
            }),
          payload,
          locatorParams,
          {
            format: 'pdf',
            title,
            logo,
            browserTimezone,
            headers,
            layout,
          }
        );
      }),
      tap(({ buffer }) => {
        apmGeneratePdf?.end();

        if (buffer) {
          stream.write(buffer);
        }
      }),
      map(({ metrics, warnings }) => ({
        content_type: 'application/pdf',
        metrics: { pdf: metrics },
        warnings,
      })),
      catchError((err) => {
        jobLogger.error(err);
        return Rx.throwError(() => err);
      })
    );

    const stop$ = Rx.fromEventPattern(cancellationToken.on);

    apmTrans?.end();
    return Rx.firstValueFrom(process$.pipe(takeUntil(stop$)));
  };
}

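A minimal usage sketch (not part of this commit) of how an export type class like the one above gets constructed and registered, mirroring the constructor arguments and registry calls that appear in the test hunks later in this diff; the import paths and mock helpers here are assumptions taken from those tests:

// Hypothetical wiring sketch; `coreMock`, `loggingSystemMock`, and
// `createMockConfigSchema` stand in for the real plugin wiring, and the
// relative import paths are illustrative only.
import { coreMock, loggingSystemMock } from '@kbn/core/server/mocks';
import { createMockConfigSchema } from '../../test_helpers';
import { ExportTypesRegistry } from '../../lib';
import { PdfExportType } from './printable_pdf_v2';

const config = createMockConfigSchema();
const pdfExportType = new PdfExportType(
  coreMock.createSetup(),
  config,
  loggingSystemMock.createLogger(),
  coreMock.createPluginInitializerContext(config)
);

// Registration is what reporting core does for each export type class;
// consumers can then resolve it by id or by job type.
const registry = new ExportTypesRegistry();
registry.register(pdfExportType);
registry.getByJobType(pdfExportType.jobType); // resolves the instance used to run a report task
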
@@ -1,11 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

export type {
JobParamsPDFV2,
TaskPayloadPDFV2,
} from '../../../common/types/export_types/printable_pdf_v2';

@@ -6,7 +6,7 @@
*/

import { ILicense } from '@kbn/licensing-plugin/server';
import { ExportTypeDefinition } from '../types';
import { ExportType } from '../export_types/common';
import { ExportTypesRegistry } from './export_types_registry';

export interface LicenseCheckResult {

@@ -25,7 +25,7 @@ const messages = {
},
};

const makeManagementFeature = (exportTypes: ExportTypeDefinition[]) => {
const makeManagementFeature = (exportTypes: ExportType[]) => {
return {
id: 'management',
checkLicense: (license?: ILicense) => {

@@ -46,7 +46,7 @@ const makeManagementFeature = (exportTypes: ExportTypeDefinition[]) => {
}

const validJobTypes = exportTypes
.filter((exportType) => exportType.validLicenses.includes(license.type || ''))
.filter((exportType) => exportType.validLicenses.includes(license.type!))
.map((exportType) => exportType.jobType);

return {

@@ -58,7 +58,7 @@ const makeManagementFeature = (exportTypes: ExportTypeDefinition[]) => {
};
};

const makeExportTypeFeature = (exportType: ExportTypeDefinition) => {
const makeExportTypeFeature = (exportType: ExportType) => {
return {
id: exportType.id,
checkLicense: (license?: ILicense) => {

@@ -122,4 +122,33 @@ describe('ExportTypesRegistry', function () {
}).toThrow();
});
});

describe('getByJobType', function () {
it('returns obj that matches the predicate', function () {
const prop = 'fooProp';
const match = { id: 'foo', jobType: prop };
[match, { id: 'bar' }, { id: 'baz' }].forEach((obj) => exportTypesRegistry.register(obj));
expect(exportTypesRegistry.getByJobType(prop)).toBe(match);
});

it('throws Error if multiple items match predicate', function () {
const prop = 'fooProp';
[
{ id: 'foo', jobType: prop },
{ id: 'bar', jobType: prop },
].forEach((obj) => exportTypesRegistry.register(obj));
expect(() => {
exportTypesRegistry.getByJobType(prop);
}).toThrow();
});

it('throws Error if no items match predicate', function () {
const prop = 'fooProp';
[
{ id: 'foo', jobtType: prop },
{ id: 'bar', jobType: prop },
].forEach((obj) => exportTypesRegistry.register(obj));
expect(() => exportTypesRegistry.getByJobType('foo')).toThrow();
});
});
});

@@ -6,24 +6,16 @@
*/

import { isString } from 'lodash';
import { getExportType as getTypeCsvFromSavedObject } from '../export_types/csv_v2';
import { getExportType as getTypeCsvFromSavedObjectImmediate } from '../export_types/csv_searchsource_immediate';
import { getExportType as getTypeCsv } from '../export_types/csv_searchsource';
import { getExportType as getTypePng } from '../export_types/png';
import { getExportType as getTypePngV2 } from '../export_types/png_v2';
import { getExportType as getTypePrintablePdf } from '../export_types/printable_pdf';
import { getExportType as getTypePrintablePdfV2 } from '../export_types/printable_pdf_v2';
import { ExportType } from '../export_types/common';

import { CreateJobFn, ExportTypeDefinition } from '../types';

type GetCallbackFn = (item: ExportTypeDefinition) => boolean;
type GetCallbackFn = (item: ExportType) => boolean;

export class ExportTypesRegistry {
private _map: Map<string, ExportTypeDefinition> = new Map();
private _map: Map<string, ExportType> = new Map();

constructor() {}

register(item: ExportTypeDefinition): void {
register(item: ExportType): void {
if (!isString(item.id)) {
throw new Error(`'item' must have a String 'id' property `);
}

@@ -43,21 +35,43 @@ export class ExportTypesRegistry {
return this._map.size;
}

getById(id: string): ExportTypeDefinition {
getById(id: string): ExportType {
if (!this._map.has(id)) {
throw new Error(`Unknown id ${id}`);
}

return this._map.get(id) as ExportTypeDefinition;
return this._map.get(id) as ExportType;
}

get(findType: GetCallbackFn): ExportTypeDefinition {
getByJobType(jobType: ExportType['jobType']): ExportType {
let result;
for (const value of this._map.values()) {
if (value.jobType !== jobType) {
continue;
}
const foundJobType = value;

if (result) {
throw new Error('Found multiple items matching predicate.');
}

result = foundJobType;
}

if (!result) {
throw new Error('Found no items matching predicate');
}

return result;
}

get(findType: GetCallbackFn): ExportType {
let result;
for (const value of this._map.values()) {
if (!findType(value)) {
continue; // try next value
}
const foundResult: ExportTypeDefinition = value;
const foundResult: ExportType = value;

if (result) {
throw new Error('Found multiple items matching predicate.');

@@ -73,30 +87,3 @@ export class ExportTypesRegistry {
return result;
}
}

// TODO: Define a 2nd ExportTypeRegistry instance for "immediate execute" report job types only.
// It should not require a `CreateJobFn` for its ExportTypeDefinitions, which only makes sense for async.
// Once that is done, the `any` types below can be removed.

/*
* @return ExportTypeRegistry: the ExportTypeRegistry instance that should be
* used to register async export type definitions
*/
export function getExportTypesRegistry(): ExportTypesRegistry {
const registry = new ExportTypesRegistry();
type CreateFnType = CreateJobFn<any, any>; // can not specify params types because different type of params are not assignable to each other
type RunFnType = any; // can not specify because ImmediateExecuteFn is not assignable to RunTaskFn
const getTypeFns: Array<() => ExportTypeDefinition<CreateFnType | null, RunFnType>> = [
getTypeCsv,
getTypeCsvFromSavedObject,
getTypeCsvFromSavedObjectImmediate,
getTypePng,
getTypePngV2,
getTypePrintablePdf,
getTypePrintablePdfV2,
];
getTypeFns.forEach((getType) => {
registry.register(getType());
});
return registry;
}

@@ -9,7 +9,7 @@ export { checkLicense } from './check_license';
export { checkParamsVersion } from './check_params_version';
export { ContentStream, getContentStream } from './content_stream';
export { cryptoFactory } from './crypto';
export { ExportTypesRegistry, getExportTypesRegistry } from './export_types_registry';
export { ExportTypesRegistry } from './export_types_registry';
export { PassThroughStream } from './passthrough_stream';
export { statuses } from './statuses';
export { ReportingStore, IlmPolicyManager } from './store';

@@ -7,11 +7,12 @@

import { loggingSystemMock } from '@kbn/core/server/mocks';
import { KibanaShuttingDownError } from '@kbn/reporting-common';
import { RunContext } from '@kbn/task-manager-plugin/server';
import type { RunContext } from '@kbn/task-manager-plugin/server';
import { taskManagerMock } from '@kbn/task-manager-plugin/server/mocks';
import { ExecuteReportTask } from '.';
import { ReportingCore } from '../..';
import { ReportingConfigType } from '../../config';
import type { ReportingCore } from '../..';
import type { ReportingConfigType } from '../../config';
import type { ExportType } from '../../export_types/common';
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';
import type { SavedReport } from '../store';

@@ -86,12 +87,14 @@ describe('Execute Report Task', () => {
mockReporting.getExportTypesRegistry().register({
id: 'noop',
name: 'Noop',
createJobFnFactory: () => async () => new Promise(() => {}),
runTaskFnFactory: () => async () => new Promise(() => {}),
jobContentExtension: 'none',
setup: jest.fn(),
start: jest.fn(),
createJob: () => new Promise(() => {}),
runTask: () => new Promise(() => {}),
jobContentExtension: 'pdf',
jobType: 'noop',
validLicenses: [],
});
} as unknown as ExportType);
const store = await mockReporting.getStore();
store.setReportFailed = jest.fn(() => Promise.resolve({} as any));
const task = new ExecuteReportTask(mockReporting, configType, logger);

@@ -26,12 +26,11 @@ import {
TaskRunResult,
} from '@kbn/reporting-common';
import { mapToReportingError } from '../../../common/errors/map_to_reporting_error';
import { getContentStream } from '..';
import { ExportTypesRegistry, getContentStream } from '..';
import type { ReportingCore } from '../..';
import { durationToNumber, numberToDuration } from '../../../common/schema_utils';
import type { ReportOutput } from '../../../common/types';
import type { ReportingConfigType } from '../../config';
import type { BasePayload, ExportTypeDefinition, RunTaskFn } from '../../types';
import type { ReportDocument, ReportingStore } from '../store';
import { Report, SavedReport } from '../store';
import type { ReportFailedFields, ReportProcessingFields } from '../store/store';

@@ -47,10 +46,6 @@ interface ReportingExecuteTaskInstance {
runAt?: Date;
}

interface TaskExecutor extends Pick<ExportTypeDefinition, 'jobContentEncoding'> {
jobExecutor: RunTaskFn<BasePayload>;
}

function isOutput(output: CompletedReportOutput | Error): output is CompletedReportOutput {
return (output as CompletedReportOutput).size != null;
}

@@ -80,10 +75,10 @@ export class ExecuteReportTask implements ReportingTask {

private logger: Logger;
private taskManagerStart?: TaskManagerStartContract;
private taskExecutors?: Map<string, TaskExecutor>;
private kibanaId?: string;
private kibanaName?: string;
private store?: ReportingStore;
private exportTypesRegistry: ExportTypesRegistry;

constructor(
private reporting: ReportingCore,

@@ -91,6 +86,7 @@ export class ExecuteReportTask implements ReportingTask {
logger: Logger
) {
this.logger = logger.get('runTask');
this.exportTypesRegistry = this.reporting.getExportTypesRegistry();
}

/*

@@ -100,22 +96,6 @@ export class ExecuteReportTask implements ReportingTask {
this.taskManagerStart = taskManager;

const { reporting } = this;

const exportTypesRegistry = reporting.getExportTypesRegistry();
const executors = new Map<string, TaskExecutor>();
for (const exportType of exportTypesRegistry.getAll()) {
const exportTypeLogger = this.logger.get(exportType.jobType);
const jobExecutor = exportType.runTaskFnFactory(reporting, exportTypeLogger);
// The task will run the function with the job type as a param.
// This allows us to retrieve the specific export type runFn when called to run an export
executors.set(exportType.jobType, {
jobExecutor,
jobContentEncoding: exportType.jobContentEncoding,
});
}

this.taskExecutors = executors;

const { uuid, name } = reporting.getServerInfo();
this.kibanaId = uuid;
this.kibanaName = name;

@@ -141,7 +121,8 @@ export class ExecuteReportTask implements ReportingTask {
}

private getJobContentEncoding(jobType: string) {
return this.taskExecutors?.get(jobType)?.jobContentEncoding;
const exportType = this.exportTypesRegistry.getByJobType(jobType);
return exportType.jobContentEncoding;
}

public async _claimJob(task: ReportTaskParams): Promise<SavedReport> {

@@ -262,21 +243,16 @@ export class ExecuteReportTask implements ReportingTask {
cancellationToken: CancellationToken,
stream: Writable
): Promise<TaskRunResult> {
if (!this.taskExecutors) {
throw new Error(`Task run function factories have not been called yet!`);
}
const exportType = this.exportTypesRegistry.getByJobType(task.jobtype);

// get the run_task function
const runner = this.taskExecutors.get(task.jobtype);
if (!runner) {
throw new Error(`No defined task runner function for ${task.jobtype}!`);
if (!exportType) {
throw new Error(`No export type from ${task.jobtype} found to execute report`);
}

// run the report
// if workerFn doesn't finish before timeout, call the cancellationToken and throw an error
const queueTimeout = durationToNumber(this.config.queue.timeout);
return Rx.lastValueFrom(
Rx.from(runner.jobExecutor(task.id, task.payload, cancellationToken, stream)).pipe(
Rx.from(exportType.runTask(task.id, task.payload, cancellationToken, stream)).pipe(
timeout(queueTimeout)
) // throw an error if a value is not emitted before timeout
);

@@ -301,6 +277,7 @@ export class ExecuteReportTask implements ReportingTask {
docId = `/${report._index}/_doc/${report._id}`;

const resp = await store.setReportCompleted(report, doc);

this.logger.info(`Saved ${report.jobtype} job ${docId}`);
report._seq_no = resp._seq_no;
report._primary_term = resp._primary_term;

@@ -383,7 +360,6 @@ export class ExecuteReportTask implements ReportingTask {
encoding: jobContentEncoding === 'base64' ? 'base64' : 'raw',
}
);

eventLog.logExecutionStart();

const output = await Promise.race<TaskRunResult>([

@@ -5,9 +5,13 @@
* 2.0.
*/

import { PNG_REPORT_TYPE_V2 } from '../../../common/constants';
import { ReportingStart } from '../types';

export const metadata = {
id: PNG_REPORT_TYPE_V2,
name: 'PNG',
export const reportingMock = {
createStart: (): ReportingStart => ({
usesUiCapabilities: () => false,
registerExportTypes: () => {},
getSpaceId: jest.fn(),
getScreenshots: jest.fn(),
}),
};

@@ -18,6 +18,7 @@ import {
} from '../../../test_helpers';
import type { ReportingRequestHandlerContext } from '../../../types';
import { registerDiagnoseBrowser } from '../browser';
import { reportingMock } from '../../../mocks';

type SetupServerReturn = Awaited<ReturnType<typeof setupServer>>;

@@ -44,7 +45,7 @@ describe('POST /diagnose/browser', () => {
httpSetup.registerRouteHandlerContext<ReportingRequestHandlerContext, 'reporting'>(
reportingSymbol,
'reporting',
() => ({ usesUiCapabilities: () => false, registerExportTypes: jest.fn() })
() => reportingMock.createStart()
);

const docLinksSetupMock = docLinksServiceMock.createSetupContract();

@@ -18,6 +18,7 @@ import {
import type { ReportingRequestHandlerContext } from '../../../types';
import { registerDiagnoseScreenshot } from '../screenshot';
import { defer } from 'rxjs';
import { reportingMock } from '../../../mocks';

jest.mock('../../../export_types/common/generate_png');

@@ -45,7 +46,7 @@ describe('POST /diagnose/screenshot', () => {
httpSetup.registerRouteHandlerContext<ReportingRequestHandlerContext, 'reporting'>(
reportingSymbol,
'reporting',
() => ({ usesUiCapabilities: () => false, registerExportTypes: jest.fn() })
() => reportingMock.createStart()
);

core = await createMockReportingCore(

@@ -11,8 +11,7 @@ import type { KibanaRequest, Logger } from '@kbn/core/server';
import moment from 'moment';
import type { ReportingCore } from '../..';
import { CSV_SEARCHSOURCE_IMMEDIATE_TYPE } from '../../../common/constants';
import { runTaskFnFactory } from '../../export_types/csv_searchsource_immediate/execute_job';
import type { JobParamsDownloadCSV } from '../../export_types/csv_searchsource_immediate/types';
import { JobParamsDownloadCSV } from '../../export_types/csv_searchsource_immediate/types';
import { PassThroughStream } from '../../lib';
import { authorizedUserPreRouting, getCounters } from '../lib';

@@ -73,7 +72,8 @@ export function registerGenerateCsvFromSavedObjectImmediate(
const counters = getCounters(req.route.method, path, reporting.getUsageCounter());

const logger = parentLogger.get(CSV_SEARCHSOURCE_IMMEDIATE_TYPE);
const runTaskFn = runTaskFnFactory(reporting, logger);
const csvSearchSourceImmediateExport = await reporting.getCsvSearchSourceImmediate();

const stream = new PassThroughStream();
const eventLog = reporting.getEventLogger({
jobtype: CSV_SEARCHSOURCE_IMMEDIATE_TYPE,

@@ -87,7 +87,9 @@ export function registerGenerateCsvFromSavedObjectImmediate(

try {
eventLog.logExecutionStart();
const taskPromise = runTaskFn(null, req.body, context, stream, req)

const taskPromise = csvSearchSourceImmediateExport
.runTask(null, req.body, context, stream, req)
.then((output) => {
logger.info(`Job output size: ${stream.bytesWritten} bytes.`);

@@ -7,7 +7,7 @@

import rison from '@kbn/rison';
import { BehaviorSubject } from 'rxjs';
import { loggingSystemMock } from '@kbn/core/server/mocks';
import { coreMock, loggingSystemMock } from '@kbn/core/server/mocks';
import { setupServer } from '@kbn/core-test-helpers-test-utils';
import supertest from 'supertest';
import { ReportingCore } from '../../..';

@@ -23,6 +23,8 @@ import {
} from '../../../test_helpers';
import type { ReportingRequestHandlerContext } from '../../../types';
import { registerJobGenerationRoutes } from '../generate_from_jobparams';
import { PdfExportType } from '../../../export_types/printable_pdf_v2';
import { reportingMock } from '../../../mocks';

type SetupServerReturn = Awaited<ReturnType<typeof setupServer>>;

@@ -39,13 +41,21 @@ describe('POST /api/reporting/generate', () => {
});

const mockLogger = loggingSystemMock.createLogger();
const mockCoreSetup = coreMock.createSetup();

const mockPdfExportType = new PdfExportType(
mockCoreSetup,
mockConfigSchema,
mockLogger,
coreMock.createPluginInitializerContext(mockConfigSchema)
);

beforeEach(async () => {
({ server, httpSetup } = await setupServer(reportingSymbol));
httpSetup.registerRouteHandlerContext<ReportingRequestHandlerContext, 'reporting'>(
reportingSymbol,
'reporting',
() => ({ usesUiCapabilities: jest.fn(), registerExportTypes: jest.fn() })
() => reportingMock.createStart()
);

const mockSetupDeps = createMockPluginSetup({

@@ -77,17 +87,7 @@ describe('POST /api/reporting/generate', () => {
);

mockExportTypesRegistry = new ExportTypesRegistry();
mockExportTypesRegistry.register({
id: 'printablePdf',
name: 'not sure why this field exists',
jobType: 'printable_pdf',
jobContentEncoding: 'base64',
jobContentExtension: 'pdf',
validLicenses: ['basic', 'gold'],
createJobFnFactory: () => async () => ({ createJobTest: { test1: 'yes' } } as any),
runTaskFnFactory: () => async () => ({ runParamsTest: { test2: 'yes' } } as any),
});
mockReportingCore.getExportTypesRegistry = () => mockExportTypesRegistry;
mockExportTypesRegistry.register(mockPdfExportType);

store = await mockReportingCore.getStore();
store.addReport = jest.fn().mockImplementation(async (opts) => {

@@ -189,7 +189,14 @@ describe('POST /api/reporting/generate', () => {

await supertest(httpSetup.server.listener)
.post('/api/reporting/generate/printablePdf')
.send({ jobParams: rison.encode({ title: `abc` }) })
.send({
jobParams: rison.encode({
title: `abc`,
relativeUrls: ['test'],
layout: { id: 'test' },
objectType: 'canvas workpad',
}),
})
.expect(200)
.then(({ body }) => {
expect(body).toMatchObject({

@@ -200,9 +207,19 @@ describe('POST /api/reporting/generate', () => {
index: 'foo-index',
jobtype: 'printable_pdf',
payload: {
createJobTest: {
test1: 'yes',
forceNow: expect.any(String),
isDeprecated: true,
layout: {
id: 'test',
},
objectType: 'canvas workpad',
objects: [
{
relativeUrl: 'test',
},
],
title: 'abc',
version: '7.14.0',
},
status: 'pending',
},

@@ -6,7 +6,7 @@

import { Readable } from 'stream';
import { CSV_JOB_TYPE, PDF_JOB_TYPE } from '../../../common/constants';
import { CSV_JOB_TYPE, PDF_JOB_TYPE, PDF_JOB_TYPE_V2 } from '../../../common/constants';
import { ReportApiJSON } from '../../../common/types';
import { ContentStream, getContentStream, statuses } from '../../lib';
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';

@@ -104,7 +104,7 @@ describe('getDocumentPayload', () => {
id: 'id1',
index: '.reporting-12345',
status: statuses.JOB_STATUS_FAILED,
jobtype: PDF_JOB_TYPE,
jobtype: PDF_JOB_TYPE_V2,
output: {},
payload: {},
} as ReportApiJSON)

@@ -128,7 +128,7 @@ describe('getDocumentPayload', () => {
id: 'id1',
index: '.reporting-12345',
status: statuses.JOB_STATUS_PENDING,
jobtype: PDF_JOB_TYPE,
jobtype: PDF_JOB_TYPE_V2,
output: {},
payload: {},
} as ReportApiJSON)

@@ -10,8 +10,8 @@ import { Stream } from 'stream';
import { ReportingCore } from '../..';
import { CSV_JOB_TYPE, CSV_JOB_TYPE_DEPRECATED } from '../../../common/constants';
import { ReportApiJSON } from '../../../common/types';
import { ExportType } from '../../export_types/common';
import { getContentStream, statuses } from '../../lib';
import { ExportTypeDefinition } from '../../types';
import { jobsQueryFactory } from './jobs_query';

export interface ErrorFromPayload {

@@ -33,10 +33,10 @@ type TaskRunResult = Required<ReportApiJSON>['output'];

const DEFAULT_TITLE = 'report';

const getTitle = (exportType: ExportTypeDefinition, title?: string): string =>
const getTitle = (exportType: ExportType, title?: string): string =>
`${title || DEFAULT_TITLE}.${exportType.jobContentExtension}`;

const getReportingHeaders = (output: TaskRunResult, exportType: ExportTypeDefinition) => {
const getReportingHeaders = (output: TaskRunResult, exportType: ExportType) => {
const metaDataHeaders: Record<string, boolean> = {};

if (exportType.jobType === CSV_JOB_TYPE || exportType.jobType === CSV_JOB_TYPE_DEPRECATED) {

@@ -60,9 +60,7 @@ export function getDocumentPayloadFactory(reporting: ReportingCore) {
jobtype: jobType,
payload: { title },
}: Required<ReportApiJSON>): Promise<Payload> {
const exportType = exportTypesRegistry.get(
(item: ExportTypeDefinition) => item.jobType === jobType
);
const exportType = exportTypesRegistry.getByJobType(jobType);
const encoding = exportType.jobContentEncoding === 'base64' ? 'base64' : 'raw';
const content = await getContentStream(reporting, { id, index }, { encoding });
const filename = getTitle(exportType, title);

@@ -7,8 +7,9 @@

import { KibanaRequest, KibanaResponseFactory } from '@kbn/core/server';
import { coreMock, httpServerMock, loggingSystemMock } from '@kbn/core/server/mocks';
import { TaskPayloadPDFV2 } from '../../../common/types/export_types/printable_pdf_v2';
import { ReportingCore } from '../..';
import { JobParamsPDFDeprecated, TaskPayloadPDF } from '../../export_types/printable_pdf/types';
import { JobParamsPDFDeprecated } from '../../export_types/printable_pdf/types';
import { Report, ReportingStore } from '../../lib/store';
import { ReportApiJSON } from '../../lib/store/report';
import { createMockConfigSchema, createMockReportingCore } from '../../test_helpers';

@@ -94,7 +95,7 @@ describe('Handle request to generate', () => {

describe('Enqueue Job', () => {
test('creates a report object to queue', async () => {
const report = await requestHandler.enqueueJob('printablePdf', mockJobParams);
const report = await requestHandler.enqueueJob('printablePdfV2', mockJobParams);

const { _id, created_at: _created_at, payload, ...snapObj } = report;
expect(snapObj).toMatchInlineSnapshot(`

@@ -106,12 +107,12 @@ describe('Handle request to generate', () => {
"completed_at": undefined,
"created_by": "testymcgee",
"execution_time_ms": undefined,
"jobtype": "printable_pdf",
"jobtype": "printable_pdf_v2",
"kibana_id": undefined,
"kibana_name": undefined,
"max_attempts": undefined,
"meta": Object {
"isDeprecated": true,
"isDeprecated": false,
"layout": "preserve_layout",
"objectType": "cool_object_type",
},

@@ -125,17 +126,18 @@ describe('Handle request to generate', () => {
"timeout": undefined,
}
`);
const { forceNow, ...snapPayload } = payload as TaskPayloadPDF;
const { forceNow, ...snapPayload } = payload as TaskPayloadPDFV2;
expect(snapPayload).toMatchInlineSnapshot(`
Object {
"browserTimezone": "UTC",
"headers": "hello mock cypher text",
"isDeprecated": true,
"isDeprecated": false,
"layout": Object {
"id": "preserve_layout",
},
"locatorParams": undefined,
"objectType": "cool_object_type",
"objects": Array [],
"relativeUrls": Array [],
"spaceId": undefined,
"title": "cool_title",
"version": "unknown",

@@ -144,6 +146,7 @@ describe('Handle request to generate', () => {
});

test('provides a default kibana version field for older POST URLs', async () => {
// how do we handle the printable_pdf endpoint that isn't migrating to the class instance of export types?
(mockJobParams as unknown as { version?: string }).version = undefined;
const report = await requestHandler.enqueueJob('printablePdf', mockJobParams);

@@ -45,7 +45,7 @@ export class RequestHandler {
}

public async enqueueJob(exportTypeId: string, jobParams: BaseParams) {
const { reporting, logger, context, req: request, user } = this;
const { reporting, logger, context, req, user } = this;

const exportType = reporting.getExportTypesRegistry().getById(exportTypeId);

@@ -53,33 +53,29 @@ export class RequestHandler {
throw new Error(`Export type ${exportTypeId} does not exist in the registry!`);
}

if (!exportType.createJobFnFactory) {
throw new Error(`Export type ${exportTypeId} is not an async job type!`);
const store = await reporting.getStore();

if (!exportType.createJob) {
throw new Error(`Export type ${exportTypeId} is not a valid instance!`);
}

const [createJob, store] = await Promise.all([
exportType.createJobFnFactory(reporting, logger.get(exportType.id)),
reporting.getStore(),
]);

if (!createJob) {
throw new Error(`Export type ${exportTypeId} is not an async job type!`);
}

// 1. ensure the incoming params have a version field (should be set by the UI)
// 1. Ensure the incoming params have a version field (should be set by the UI)
jobParams.version = checkParamsVersion(jobParams, logger);

// 2. encrypt request headers for the running report job to authenticate itself with Kibana
// 3. call the export type's createJobFn to create the job payload
const [headers, job] = await Promise.all([
this.encryptHeaders(),
createJob(jobParams, context, this.req),
]);
// 2. Encrypt request headers to store for the running report job to authenticate itself with Kibana
const headers = await this.encryptHeaders();

// 3. Create a payload object by calling exportType.createJob(), and adding some automatic parameters
const job = await exportType.createJob(jobParams, context, req);

const payload = {
...job,
headers,
spaceId: reporting.getSpaceId(request, logger),
title: job.title,
objectType: jobParams.objectType,
browserTimezone: jobParams.browserTimezone,
version: jobParams.version,
spaceId: reporting.getSpaceId(req, logger),
};

// 4. Add the report to ReportingStore to show as pending

@@ -88,6 +84,7 @@ export class RequestHandler {
jobtype: exportType.jobType,
created_by: user ? user.username : false,
payload,
migration_version: jobParams.version,
meta: {
// telemetry fields
objectType: jobParams.objectType,

@@ -106,7 +103,6 @@ export class RequestHandler {

// 6. Log the action with event log
reporting.getEventLogger(report, task).logScheduleTask();

return report;
}

@@ -140,10 +136,8 @@ export class RequestHandler {
let report: Report | undefined;
try {
report = await this.enqueueJob(exportTypeId, jobParams);

// return task manager's task information and the download URL
const downloadBaseUrl = getDownloadBaseUrl(this.reporting);

counters.usageCounter();

return this.res.ok({

@@ -24,8 +24,10 @@ import {
createMockPluginStart,
createMockReportingCore,
} from '../../../test_helpers';
import { ExportTypeDefinition, ReportingRequestHandlerContext } from '../../../types';
import { ReportingRequestHandlerContext } from '../../../types';
import { registerJobInfoRoutes } from '../jobs';
import { ExportType } from '../../../export_types/common';
import { reportingMock } from '../../../mocks';

type SetupServerReturn = Awaited<ReturnType<typeof setupServer>>;

@@ -55,7 +57,7 @@ describe('GET /api/reporting/jobs/download', () => {
httpSetup.registerRouteHandlerContext<ReportingRequestHandlerContext, 'reporting'>(
reportingSymbol,
'reporting',
() => ({ usesUiCapabilities: jest.fn(), registerExportTypes: jest.fn() })
() => reportingMock.createStart()
);

mockSetupDeps = createMockPluginSetup({

@@ -88,14 +90,14 @@ describe('GET /api/reporting/jobs/download', () => {
jobType: 'unencodedJobType',
jobContentExtension: 'csv',
validLicenses: ['basic', 'gold'],
} as ExportTypeDefinition);
} as ExportType);
exportTypesRegistry.register({
id: 'base64Encoded',
jobType: 'base64EncodedJobType',
jobContentEncoding: 'base64',
jobContentExtension: 'pdf',
validLicenses: ['basic', 'gold'],
} as ExportTypeDefinition);
} as ExportType);
core.getExportTypesRegistry = () => exportTypesRegistry;

mockEsClient = (await core.getEsClient()).asInternalUser as typeof mockEsClient;

@@ -5,7 +5,7 @@
* 2.0.
*/

import type { CustomRequestHandlerContext, IRouter, KibanaRequest, Logger } from '@kbn/core/server';
import type { CustomRequestHandlerContext, IRouter, KibanaRequest } from '@kbn/core/server';
import type { DataPluginStart } from '@kbn/data-plugin/server/plugin';
import { DiscoverServerPluginStart } from '@kbn/discover-plugin/server';
import type { PluginSetupContract as FeaturesPluginSetup } from '@kbn/features-plugin/server';

@@ -32,19 +32,20 @@ import type { Writable } from 'stream';
import type { CancellationToken, TaskRunResult } from '@kbn/reporting-common';
import type { BaseParams, BasePayload, UrlOrUrlLocatorTuple } from '../common/types';
import type { ReportingConfigType } from './config';
import type { ReportingCore } from './core';
import type { ReportTaskParams } from './lib/tasks';
import { ExportTypesRegistry } from './lib';
import { ReportingCore } from './core';

/**
* Plugin Setup Contract
*/
export interface ReportingSetup {
registerExportTypes: ExportTypesRegistry['register'];
getSpaceId: ReportingCore['getSpaceId'];
getScreenshots: ReportingCore['getScreenshots'];
/**
* Used to inform plugins if Reporting config is compatible with UI Capabilities / Application Sub-Feature Controls
*/
usesUiCapabilities: () => boolean;
registerExportTypes: ExportTypesRegistry['register'];
}

/**

@@ -58,46 +59,21 @@ export type ScrollConfig = ReportingConfigType['csv']['scroll'];
/**
* Internal Types
*/

// default fn type for CreateJobFnFactory
// standard type for create job function of any ExportType implementation
export type CreateJobFn<JobParamsType = BaseParams, JobPayloadType = BasePayload> = (
jobParams: JobParamsType,
context: ReportingRequestHandlerContext,
req: KibanaRequest
) => Promise<Omit<JobPayloadType, 'headers' | 'spaceId'>>;

// default fn type for RunTaskFnFactory
// standard type for run task function of any ExportType implementation
export type RunTaskFn<TaskPayloadType = BasePayload> = (
jobId: string,
payload: ReportTaskParams<TaskPayloadType>['payload'],
payload: TaskPayloadType,
cancellationToken: CancellationToken,
stream: Writable
) => Promise<TaskRunResult>;

export type CreateJobFnFactory<CreateJobFnType> = (
reporting: ReportingCore,
logger: Logger
) => CreateJobFnType;

export type RunTaskFnFactory<RunTaskFnType> = (
reporting: ReportingCore,
logger: Logger
) => RunTaskFnType;

export interface ExportTypeDefinition<
CreateJobFnType = CreateJobFn | null,
RunTaskFnType = RunTaskFn
> {
id: string;
name: string;
jobType: string;
jobContentEncoding?: string;
jobContentExtension: string;
createJobFnFactory: CreateJobFnFactory<CreateJobFnType> | null; // immediate job does not have a "create" phase
runTaskFnFactory: RunTaskFnFactory<RunTaskFnType>;
validLicenses: string[];
}

export interface ReportingSetupDeps {
features: FeaturesPluginSetup;
screenshotMode: ScreenshotModePluginSetup;

@@ -2829,37 +2829,6 @@ Object {
},
"total": 4,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -2999,37 +2968,6 @@ Object {
},
"total": 4,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -3388,37 +3326,6 @@ Object {
},
"total": 0,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -3558,37 +3465,6 @@ Object {
},
"total": 0,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -3935,37 +3811,6 @@ Object {
},
"total": 0,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -4105,37 +3950,6 @@ Object {
},
"total": 0,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -4516,37 +4330,6 @@ Object {
},
"total": 1,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -4686,37 +4469,6 @@ Object {
},
"total": 1,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -5085,37 +4837,6 @@ Object {
},
"total": 1,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -5255,37 +4976,6 @@ Object {
},
"total": 0,
},
"csv_searchsource_immediate": Object {
"app": Object {
"canvas workpad": 0,
"dashboard": 0,
"search": 0,
"visualization": 0,
},
"available": true,
"deprecated": 0,
"error_codes": undefined,
"execution_times": undefined,
"layout": undefined,
"metrics": Object {
"csv_rows": Object {
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
},
"output_size": Object {
"1.0": null,
"25.0": null,
"5.0": null,
"50.0": null,
"75.0": null,
"95.0": null,
"99.0": null,
},
"total": 0,
},
"csv_v2": Object {
"app": Object {
"canvas workpad": 0,

@@ -5,7 +5,8 @@
* 2.0.
*/

import { getExportTypesRegistry } from '../lib';
import { ExportTypesRegistry } from '../lib';
import { createMockReportingCore, createMockConfigSchema } from '../test_helpers';
import { getExportStats } from './get_export_stats';
import { getExportTypesHandler } from './get_export_type_handler';
import { ErrorCodeStats, FeatureAvailabilityMap, MetricsStats } from './types';

@@ -20,13 +21,16 @@ const sizesAggResponse = {
'95.0': 1.1935594e7,
'99.0': 1.1935594e7,
};
let exportTypesRegistry: ExportTypesRegistry;
let exportTypesHandler: ReturnType<typeof getExportTypesHandler>;

beforeEach(() => {
beforeEach(async () => {
const mockReporting = await createMockReportingCore(createMockConfigSchema());
exportTypesRegistry = mockReporting.getExportTypesRegistry();
exportTypesHandler = getExportTypesHandler(exportTypesRegistry);
featureMap = { PNG: true, csv_searchsource: true, printable_pdf: true };
});

const exportTypesHandler = getExportTypesHandler(getExportTypesRegistry());

test('Model of job status and status-by-pdf-app', () => {
const result = getExportStats(
{

@@ -414,19 +418,6 @@ test('Incorporate error code stats', () => {
invalid_layout_parameters_error: 0,
},
},
csv_searchsource_immediate: {
available: true,
total: 3,
output_size: sizesAggResponse,
metrics: { png_cpu: {}, png_memory: {} } as MetricsStats,
app: { dashboard: 3, visualization: 0, 'canvas workpad': 0 },
error_codes: {
authentication_expired_error: 5,
queue_timeout_error: 1,
unknown_error: 0,
kibana_shutting_down_error: 1,
},
},
},
featureMap,
exportTypesHandler

@@ -459,13 +450,4 @@ test('Incorporate error code stats', () => {
"visual_reporting_soft_disabled_error": 1,
}
`);

expect(result.csv_searchsource_immediate.error_codes).toMatchInlineSnapshot(`
Object {
"authentication_expired_error": 5,
"kibana_shutting_down_error": 1,
"queue_timeout_error": 1,
"unknown_error": 0,
}
`);
});

@@ -12,15 +12,13 @@ import {
createCollectorFetchContextMock,
usageCollectionPluginMock,
} from '@kbn/usage-collection-plugin/server/mocks';
import { getExportTypesRegistry } from '../lib/export_types_registry';
import { createMockConfigSchema, createMockReportingCore } from '../test_helpers';
import { FeaturesAvailability } from '.';
import {
getReportingUsageCollector,
registerReportingUsageCollector,
} from './reporting_usage_collector';

const exportTypesRegistry = getExportTypesRegistry();
import { ExportTypesRegistry } from '../lib';

const getLicenseMock =
(licenseType = 'gold') =>

@@ -40,11 +38,15 @@ const getMockFetchClients = (resp: any) => {
};

const usageCollectionSetup = usageCollectionPluginMock.createSetupContract();
let exportTypesRegistry: ExportTypesRegistry;

describe('license checks', () => {
describe('with a basic license', () => {
let usageStats: any;
beforeAll(async () => {
const mockReporting = await createMockReportingCore(createMockConfigSchema());
exportTypesRegistry = mockReporting.getExportTypesRegistry();

const collector = getReportingUsageCollector(
usageCollectionSetup,
getLicenseMock('basic'),
