[Reporting] Track retryAt time on the task instance (#174409)

## Summary

Initial part of https://github.com/elastic/kibana/issues/131852

This PR moves towards auto-calculating maximum timeouts based on the timing context provided by the running task instance.
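
The timing context is carried by the new `TaskInstanceFields` type added to `@kbn/reporting-common/types` (and mirrored in the screenshotting plugin's common types). Below is a minimal sketch of that shape and of how a follow-up could derive a timeout from it; the `computeRemainingMs` helper is hypothetical and not part of this PR, which only threads the fields through and logs them:

```ts
import moment from 'moment';
import type { ConcreteTaskInstance } from '@kbn/task-manager-plugin/server';

// Timestamp metrics about the task lifecycle (as defined in this PR).
export type TaskInstanceFields = Pick<ConcreteTaskInstance, 'startedAt' | 'retryAt'>;

// Hypothetical helper (not in this PR): how long the task may keep running
// before Task Manager would consider it timed out and schedule a retry.
export function computeRemainingMs({ startedAt, retryAt }: TaskInstanceFields): number | undefined {
  if (!startedAt || !retryAt) {
    // Immediate-download exports pass { startedAt: null, retryAt: null },
    // so there is no task-based deadline to calculate.
    return undefined;
  }
  return moment(retryAt).diff(moment());
}
```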

### Other changes
* Added an optional logger parameter to the `getScreenshots` function. When a logger is provided, the logs created by the screenshotting plugin carry the contextual tags added by the calling code (see the sketch after this list).
  * Before: screenshot of screenshotting log entries without the caller's contextual tags
  * After: screenshot of the same log entries with the contextual tags added by the calling code

* Fixed a previously unreported bug where the browser timezone was not applied to PNG reports.
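
For the logger change, here is a hedged sketch of the caller side: an export type forwards its job-scoped logger (and the new task timing fields) to `getScreenshots`, so the screenshotting plugin's messages are tagged with the caller's context. The helper function and the literal option values are illustrative, not code from this PR:

```ts
import type { Logger } from '@kbn/logging';
import type { TaskInstanceFields } from '@kbn/reporting-common/types';
import type { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';

// Illustrative helper: capture a PNG on behalf of a reporting job.
function capturePng(
  screenshotting: ScreenshottingStart,
  logger: Logger, // e.g. this.logger.get(`execute-job:${jobId}`) in an export type
  taskInstanceFields: TaskInstanceFields, // { startedAt, retryAt } from the running task
  url: string
) {
  return screenshotting.getScreenshots({
    format: 'png',
    headers: {},
    browserTimezone: 'UTC', // now forwarded for PNG reports as part of the timezone fix
    layout: { id: 'preserve_layout' },
    urls: [url],
    taskInstanceFields,
    logger, // optional; when omitted, screenshotting falls back to its own logger
  });
}
```

When a logger is passed, the plugin nests it under a `screenshotting` context (`options.logger?.get('screenshotting') ?? this.logger`), which is what produces the tagged log lines shown in the Before/After comparison above.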

### Checklist

- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios

Tim Sullivan 2024-01-16 14:40:41 -07:00 committed by GitHub
parent f288919b14
commit decec379ae
23 changed files with 282 additions and 107 deletions


@ -25,8 +25,8 @@ import { IScopedSearchClient } from '@kbn/data-plugin/server';
import { dataPluginMock } from '@kbn/data-plugin/server/mocks';
import { FieldFormatsRegistry } from '@kbn/field-formats-plugin/common';
import { CancellationToken } from '@kbn/reporting-common';
import type { ReportingConfigType } from '@kbn/reporting-server';
import { JobParamsCSV } from '@kbn/reporting-export-types-csv-common';
import type { ReportingConfigType } from '@kbn/reporting-server';
import {
UI_SETTINGS_CSV_QUOTE_VALUES,
UI_SETTINGS_CSV_SEPARATOR,
@ -37,6 +37,7 @@ import { CsvGenerator } from './generate_csv';
const createMockJob = (baseObj: any = {}): JobParamsCSV => ({
...baseObj,
});
const mockTaskInstanceFields = { startedAt: null, retryAt: null };
describe('CsvGenerator', () => {
let mockEsClient: IScopedClusterClient;
@ -145,6 +146,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -180,6 +182,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -219,6 +222,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -269,6 +273,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -328,6 +333,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -400,6 +406,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ searchSource: {}, columns: [] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -440,6 +447,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ searchSource: {}, columns: ['_id', 'sku'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -487,6 +495,7 @@ describe('CsvGenerator', () => {
columns: ['_id', '_index', 'date', 'message'],
}),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -541,6 +550,7 @@ describe('CsvGenerator', () => {
},
}),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -583,6 +593,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ searchSource: {}, columns: ['product', 'category'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -621,6 +632,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ searchSource: {}, columns: ['_id', '_index', 'product', 'category'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -659,6 +671,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ searchSource: {}, columns: [] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -699,6 +712,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -737,6 +751,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', TEST_FORMULA] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -784,6 +799,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -817,6 +833,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({}),
mockConfig,
mockTaskInstanceFields,
{ es: mockEsClient, data: mockDataClient, uiSettings: uiSettingsClient },
{
searchSourceStart: mockSearchSourceService,
@ -872,6 +889,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -923,6 +941,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -960,6 +979,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -992,6 +1012,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -1041,6 +1062,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,
@ -1100,6 +1122,7 @@ describe('CsvGenerator', () => {
const generateCsv = new CsvGenerator(
createMockJob({ columns: ['date', 'ip', 'message'] }),
mockConfig,
mockTaskInstanceFields,
{
es: mockEsClient,
data: mockDataClient,


@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
import moment from 'moment';
import { lastValueFrom } from 'rxjs';
import type { Writable } from 'stream';
@ -31,7 +32,7 @@ import {
ReportingError,
byteSizeValueToNumber,
} from '@kbn/reporting-common';
import type { TaskRunResult } from '@kbn/reporting-common/types';
import type { TaskInstanceFields, TaskRunResult } from '@kbn/reporting-common/types';
import type { ReportingConfigType } from '@kbn/reporting-server';
import { CONTENT_TYPE_CSV } from './constants';
@ -59,6 +60,7 @@ export class CsvGenerator {
constructor(
private job: Omit<JobParamsCSV, 'version'>,
private config: ReportingConfigType['csv'],
private taskInstanceFields: TaskInstanceFields,
private clients: Clients,
private dependencies: Dependencies,
private cancellationToken: CancellationToken,
@ -346,16 +348,19 @@ export class CsvGenerator {
}
public async generateData(): Promise<TaskRunResult> {
const logger = this.logger;
const [settings, searchSource] = await Promise.all([
getExportSettings(
this.clients.uiSettings,
this.config,
this.job.browserTimezone,
this.logger
),
getExportSettings(this.clients.uiSettings, this.config, this.job.browserTimezone, logger),
this.dependencies.searchSourceStart.create(this.job.searchSource),
]);
let reportingError: undefined | ReportingError;
const { startedAt, retryAt } = this.taskInstanceFields;
if (startedAt) {
this.logger.debug(
`Task started at: ${startedAt && moment(startedAt).format()}.` +
` Can run until: ${retryAt && moment(retryAt).format()}`
);
}
const index = searchSource.getField('index');
@ -372,12 +377,13 @@ export class CsvGenerator {
let totalRecords: number | undefined;
let searchAfter: estypes.SortResults | undefined;
let reportingError: undefined | ReportingError;
let pitId = await this.openPointInTime(indexPatternTitle, settings);
// apply timezone from the job to all date field formatters
try {
index.fields.getByType('date').forEach(({ name }) => {
this.logger.debug(`Setting timezone on ${name}`);
logger.debug(`Setting timezone on ${name}`);
const format: FieldFormatConfig = {
...index.fieldFormatMap[name],
id: index.fieldFormatMap[name]?.id || 'date', // allow id: date_nanos
@ -389,7 +395,7 @@ export class CsvGenerator {
index.setFieldFormat(name, format);
});
} catch (err) {
this.logger.error(err);
logger.error(err);
}
const columns = new Set<string>(this.job.columns ?? []);
@ -403,7 +409,7 @@ export class CsvGenerator {
const results = await this.doSearch(searchSource, settings, searchAfter);
if (!results) {
this.logger.warn(`Search results are undefined!`);
logger.warn(`Search results are undefined!`);
break;
}
@ -423,14 +429,14 @@ export class CsvGenerator {
// Update last sort results for next query. PIT is used, so the sort results
// automatically include _shard_doc as a tiebreaker
searchAfter = hits[hits.length - 1]?.sort as estypes.SortResults | undefined;
this.logger.debug(`Received search_after: [${searchAfter}]`);
logger.debug(`Received search_after: [${searchAfter}]`);
// check for shard failures, log them and add a warning if found
const { _shards: shards } = results;
if (shards.failures) {
shards.failures.forEach(({ reason }) => {
warnings.push(`Shard failure: ${JSON.stringify(reason)}`);
this.logger.warn(JSON.stringify(reason));
logger.warn(JSON.stringify(reason));
});
}
@ -438,7 +444,7 @@ export class CsvGenerator {
try {
table = tabifyDocs(results, index, { shallow: true, includeIgnoredValues: true });
} catch (err) {
this.logger.error(err);
logger.error(err);
warnings.push(i18nTexts.unknownError(err?.message ?? err));
}
@ -472,7 +478,7 @@ export class CsvGenerator {
warnings.push(i18nTexts.escapedFormulaValuesMessage);
}
} catch (err) {
this.logger.error(err);
logger.error(err);
if (err instanceof esErrors.ResponseError) {
if ([401, 403].includes(err.statusCode ?? 0)) {
reportingError = new AuthenticationExpiredError();
@ -487,20 +493,20 @@ export class CsvGenerator {
try {
if (pitId) {
this.logger.debug(`Closing PIT ${this.formatPit(pitId)}`);
logger.debug(`Closing PIT ${this.formatPit(pitId)}`);
await this.clients.es.asCurrentUser.closePointInTime({ body: { id: pitId } });
} else {
this.logger.warn(`No PIT ID to clear!`);
logger.warn(`No PIT ID to clear!`);
}
} catch (err) {
this.logger.error(err);
logger.error(err);
warnings.push(i18nTexts.csvUnableToClosePit());
}
this.logger.info(`Finished generating. Row count: ${this.csvRowCount}.`);
logger.info(`Finished generating. Row count: ${this.csvRowCount}.`);
if (!this.maxSizeReached && this.csvRowCount !== totalRecords) {
this.logger.warn(
logger.warn(
`ES scroll returned fewer total hits than expected! ` +
`Search result total hits: ${totalRecords}. Row count: ${this.csvRowCount}`
);


@ -19,5 +19,6 @@
"@kbn/utility-types",
"@kbn/screenshotting-plugin",
"@kbn/i18n",
"@kbn/task-manager-plugin",
]
}


@ -6,6 +6,7 @@
* Side Public License, v 1.
*/
import type { ConcreteTaskInstance } from '@kbn/task-manager-plugin/server';
import type {
LayoutParams,
PerformanceMetrics as ScreenshotMetrics,
@ -82,6 +83,11 @@ export interface BasePayload extends BaseParams {
isDeprecated?: boolean;
}
/**
* Timestamp metrics about the task lifecycle
*/
export type TaskInstanceFields = Pick<ConcreteTaskInstance, 'startedAt' | 'retryAt'>;
export type JobId = string;
/**


@ -19,19 +19,20 @@ jest.mock('@kbn/generate-csv', () => ({
import nodeCrypto from '@elastic/node-crypto';
import { coreMock, elasticsearchServiceMock, loggingSystemMock } from '@kbn/core/server/mocks';
import { Writable } from 'stream';
import { CancellationToken } from '@kbn/reporting-common';
import { dataPluginMock } from '@kbn/data-plugin/server/mocks';
import { discoverPluginMock } from '@kbn/discover-plugin/server/mocks';
import { createFieldFormatsStartMock } from '@kbn/field-formats-plugin/server/mocks';
import { dataPluginMock } from '@kbn/data-plugin/server/mocks';
import { setFieldFormats } from '@kbn/reporting-server';
import { CancellationToken } from '@kbn/reporting-common';
import { createMockConfigSchema } from '@kbn/reporting-mocks-server';
import { setFieldFormats } from '@kbn/reporting-server';
import { Writable } from 'stream';
import { CsvSearchSourceExportType } from '.';
const mockLogger = loggingSystemMock.createLogger();
const encryptionKey = 'tetkey';
const headers = { sid: 'cooltestheaders' };
const taskInstanceFields = { startedAt: null, retryAt: null };
let encryptedHeaders: string;
let stream: jest.Mocked<Writable>;
let mockCsvSearchSourceExportType: CsvSearchSourceExportType;
@ -92,6 +93,7 @@ test('gets the csv content from job parameters', async () => {
title: 'Test Search',
version: '7.13.0',
},
taskInstanceFields,
new CancellationToken(),
stream
);
@ -103,3 +105,24 @@ test('gets the csv content from job parameters', async () => {
}
`);
});
test('uses the provided logger', async () => {
const logSpy = jest.spyOn(mockLogger, 'get');
await mockCsvSearchSourceExportType.runTask(
'cool-job-id',
{
headers: encryptedHeaders,
browserTimezone: 'US/Alaska',
searchSource: {},
objectType: 'search',
title: 'Test Search',
version: '7.13.0',
},
taskInstanceFields,
new CancellationToken(),
stream
);
expect(logSpy).toHaveBeenCalledWith('execute-job:cool-job-id');
});


@ -20,6 +20,7 @@ import {
LICENSE_TYPE_PLATINUM,
LICENSE_TYPE_TRIAL,
} from '@kbn/reporting-common';
import { TaskInstanceFields } from '@kbn/reporting-common/types';
import {
CSV_JOB_TYPE,
CSV_REPORT_TYPE,
@ -72,11 +73,13 @@ export class CsvSearchSourceExportType extends ExportType<
public runTask = async (
jobId: string,
job: TaskPayloadCSV,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
) => {
const { encryptionKey, csv: csvConfig } = this.config;
const logger = this.logger.get(`execute-job:${jobId}`);
const { encryptionKey, csv: csvConfig } = this.config;
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
const fakeRequest = this.getFakeRequest(headers, job.spaceId, logger);
const uiSettings = await this.getUiSettingsClient(fakeRequest, logger);
@ -99,6 +102,7 @@ export class CsvSearchSourceExportType extends ExportType<
const csv = new CsvGenerator(
job,
csvConfig,
taskInstanceFields,
clients,
dependencies,
cancellationToken,


@ -108,9 +108,11 @@ export class CsvSearchSourceImmediateExportType extends ExportType<
};
const cancellationToken = new CancellationToken();
const csvConfig = this.config.csv;
const taskInstanceFields = { startedAt: null, retryAt: null };
const csv = new CsvGenerator(
job,
csvConfig,
taskInstanceFields,
clients,
dependencies,
cancellationToken,


@ -22,6 +22,7 @@ import {
LICENSE_TYPE_PLATINUM,
LICENSE_TYPE_TRIAL,
} from '@kbn/reporting-common';
import { TaskInstanceFields } from '@kbn/reporting-common/types';
import {
CSV_REPORT_TYPE_V2,
JobParamsCsvFromSavedObject,
@ -102,12 +103,14 @@ export class CsvV2ExportType extends ExportType<
public runTask = async (
jobId: string,
job: TaskPayloadCsvFromSavedObject,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
) => {
const logger = this.logger.get(`execute:${jobId}`);
const config = this.config;
const { encryptionKey, csv: csvConfig } = config;
const logger = this.logger.get(`execute:${jobId}`);
const headers = await decryptJobHeaders(encryptionKey, job.headers, logger);
const fakeRequest = this.getFakeRequest(headers, job.spaceId, logger);
@ -137,6 +140,7 @@ export class CsvV2ExportType extends ExportType<
...job,
},
csvConfig,
taskInstanceFields,
clients,
dependencies,
cancellationToken,


@ -15,6 +15,7 @@ import { TaskPayloadPDF } from '@kbn/reporting-export-types-pdf-common';
import { createMockConfigSchema } from '@kbn/reporting-mocks-server';
import { cryptoFactory } from '@kbn/reporting-server';
import { createMockScreenshottingStart } from '@kbn/screenshotting-plugin/server/mock';
import { PdfV1ExportType } from '.';
let content: string;
@ -22,6 +23,7 @@ let mockPdfExportType: PdfV1ExportType;
let stream: jest.Mocked<Writable>;
const cancellationToken = new CancellationToken();
const taskInstanceFields = { startedAt: null, retryAt: null };
const mockLogger = loggingSystemMock.createLogger();
const mockEncryptionKey = 'testencryptionkey';
@ -55,7 +57,9 @@ beforeEach(async () => {
uiSettings: mockCoreStart.uiSettings,
screenshotting: screenshottingMock,
});
getScreenshotsSpy.mockImplementation(() => {
getScreenshotsSpy.mockImplementation((opts) => {
const { logger } = opts;
logger?.get('screenshotting');
return Rx.of({
metrics: { cpu: 0, pages: 1 },
data: Buffer.from(testContent),
@ -65,7 +69,7 @@ beforeEach(async () => {
});
});
test(`passes browserTimezone to generatePdf`, async () => {
test(`passes browserTimezone to getScreenshots`, async () => {
const encryptedHeaders = await encryptHeaders({});
const browserTimezone = 'UTC';
@ -76,19 +80,14 @@ test(`passes browserTimezone to generatePdf`, async () => {
headers: encryptedHeaders,
objects: [{ relativeUrl: '/app/kibana#/something' }],
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(getScreenshotsSpy).toHaveBeenCalledWith({
browserTimezone: 'UTC',
format: 'pdf',
headers: {},
layout: undefined,
logo: false,
title: undefined,
urls: ['http://localhost:80/mock-server-basepath/app/kibana#/something'],
});
expect(getScreenshotsSpy).toHaveBeenCalledWith(
expect.objectContaining({ browserTimezone: 'UTC' })
);
});
test(`returns content_type of application/pdf`, async () => {
@ -97,20 +96,37 @@ test(`returns content_type of application/pdf`, async () => {
const { content_type: contentType } = await mockPdfExportType.runTask(
'pdfJobId',
getBasePayload({ objects: [], headers: encryptedHeaders }),
taskInstanceFields,
cancellationToken,
stream
);
expect(contentType).toBe('application/pdf');
});
test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
test(`returns buffer content base64 encoded`, async () => {
const encryptedHeaders = await encryptHeaders({});
await mockPdfExportType.runTask(
'pdfJobId',
getBasePayload({ objects: [], headers: encryptedHeaders }),
taskInstanceFields,
cancellationToken,
stream
);
expect(content).toEqual(testContent);
});
test(`screenshotting plugin uses the logger provided by the PDF export-type`, async () => {
const logSpy = jest.spyOn(mockLogger, 'get');
const encryptedHeaders = await encryptHeaders({});
await mockPdfExportType.runTask(
'pdfJobId',
getBasePayload({ objects: [], headers: encryptedHeaders }),
taskInstanceFields,
cancellationToken,
stream
);
expect(logSpy).toHaveBeenCalledWith('screenshotting');
});


@ -21,7 +21,7 @@ import {
LICENSE_TYPE_TRIAL,
REPORTING_TRANSACTION_TYPE,
} from '@kbn/reporting-common';
import { TaskRunResult } from '@kbn/reporting-common/types';
import { TaskInstanceFields, TaskRunResult } from '@kbn/reporting-common/types';
import {
JobParamsPDFDeprecated,
PDF_JOB_TYPE,
@ -73,6 +73,7 @@ export class PdfV1ExportType extends ExportType<JobParamsPDFDeprecated, TaskPayl
public runTask = async (
jobId: string,
job: TaskPayloadPDF,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
) => {
@ -108,6 +109,8 @@ export class PdfV1ExportType extends ExportType<JobParamsPDFDeprecated, TaskPayl
browserTimezone,
headers,
layout,
taskInstanceFields,
logger,
})
.pipe(
tap(({ metrics }) => {


@ -23,6 +23,7 @@ let mockPdfExportType: PdfExportType;
let stream: jest.Mocked<Writable>;
const cancellationToken = new CancellationToken();
const taskInstanceFields = { startedAt: null, retryAt: null };
const mockLogger = loggingSystemMock.createLogger();
const mockEncryptionKey = 'testencryptionkey';
@ -64,7 +65,9 @@ beforeEach(async () => {
screenshotting: screenshottingMock,
});
getScreenshotsSpy.mockImplementation(() => {
getScreenshotsSpy.mockImplementation((opts) => {
const { logger } = opts;
logger?.get('screenshotting');
return Rx.of({
metrics: { cpu: 0, pages: 1 },
data: Buffer.from(testContent),
@ -74,7 +77,7 @@ beforeEach(async () => {
});
});
test(`passes browserTimezone to generatePdf`, async () => {
test(`passes browserTimezone to getScreenshots`, async () => {
const browserTimezone = 'UTC';
await mockPdfExportType.runTask(
'pdfJobId',
@ -86,24 +89,14 @@ test(`passes browserTimezone to generatePdf`, async () => {
browserTimezone,
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(getScreenshotsSpy).toHaveBeenCalledWith({
browserTimezone: 'UTC',
format: 'pdf',
headers: {},
layout: { dimensions: {} },
logo: false,
title: 'PDF Params Timezone Test',
urls: [
[
'http://localhost:80/mock-server-basepath/app/reportingRedirect?forceNow=test',
{ __REPORTING_REDIRECT_LOCATOR_STORE_KEY__: { id: 'test', version: 'test' } },
],
],
});
expect(getScreenshotsSpy).toHaveBeenCalledWith(
expect.objectContaining({ browserTimezone: 'UTC' })
);
});
test(`returns content_type of application/pdf`, async () => {
@ -114,13 +107,14 @@ test(`returns content_type of application/pdf`, async () => {
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(contentType).toBe('application/pdf');
});
test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
test(`returns buffer content base64 encoded`, async () => {
await mockPdfExportType.runTask(
'pdfJobId',
getBasePayload({
@ -128,9 +122,28 @@ test(`returns content of generatePdf getBuffer base64 encoded`, async () => {
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(content).toEqual(testContent);
});
test(`screenshotting plugin uses the logger provided by the PDF export-type`, async () => {
const logSpy = jest.spyOn(mockLogger, 'get');
await mockPdfExportType.runTask(
'pdfJobId',
getBasePayload({
layout: { dimensions: {} },
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(logSpy).toHaveBeenCalledWith('screenshotting');
});


@ -22,7 +22,7 @@ import {
REPORTING_REDIRECT_LOCATOR_STORE_KEY,
REPORTING_TRANSACTION_TYPE,
} from '@kbn/reporting-common';
import type { TaskRunResult } from '@kbn/reporting-common/types';
import type { TaskInstanceFields, TaskRunResult } from '@kbn/reporting-common/types';
import type { TaskPayloadPDFV2 } from '@kbn/reporting-export-types-pdf-common';
import {
JobParamsPDFV2,
@ -78,6 +78,7 @@ export class PdfExportType extends ExportType<JobParamsPDFV2, TaskPayloadPDFV2>
public runTask = (
jobId: string,
payload: TaskPayloadPDFV2,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
) => {
@ -130,6 +131,8 @@ export class PdfExportType extends ExportType<JobParamsPDFV2, TaskPayloadPDFV2>
? url
: [url[0], { [REPORTING_REDIRECT_LOCATOR_STORE_KEY]: url[1] }]
),
taskInstanceFields,
logger,
})
.pipe(
tap(({ metrics }) => {


@ -24,6 +24,7 @@ let mockPngExportType: PngExportType;
let stream: jest.Mocked<Writable>;
const cancellationToken = new CancellationToken();
const taskInstanceFields = { startedAt: null, retryAt: null };
const mockLogger = loggingSystemMock.createLogger();
const mockEncryptionKey = 'abcabcsecuresecret';
@ -61,7 +62,9 @@ beforeEach(async () => {
screenshotting: screenshottingMock,
});
getScreenshotsSpy.mockImplementation(() => {
getScreenshotsSpy.mockImplementation((opts) => {
const { logger } = opts;
logger?.get('screenshotting');
return Rx.of({
metrics: { cpu: 0 },
results: [{ screenshots: [{ data: Buffer.from(testContent) }] }] as CaptureResult['results'],
@ -69,7 +72,7 @@ beforeEach(async () => {
});
});
test(`passes browserTimezone to generatePng`, async () => {
test(`passes browserTimezone to getScreenshots`, async () => {
const browserTimezone = 'UTC';
await mockPngExportType.runTask(
'pngJobId',
@ -80,21 +83,14 @@ test(`passes browserTimezone to generatePng`, async () => {
browserTimezone,
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(getScreenshotsSpy).toHaveBeenCalledWith({
format: 'png',
headers: {},
layout: { dimensions: {}, id: 'preserve_layout' },
urls: [
[
'http://localhost:80/mock-server-basepath/app/reportingRedirect?forceNow=test',
{ __REPORTING_REDIRECT_LOCATOR_STORE_KEY__: undefined },
],
],
});
expect(getScreenshotsSpy).toHaveBeenCalledWith(
expect.objectContaining({ browserTimezone: 'UTC' })
);
});
test(`returns content_type of application/png`, async () => {
@ -105,13 +101,14 @@ test(`returns content_type of application/png`, async () => {
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(contentType).toBe('image/png');
});
test(`returns content of generatePng getBuffer base64 encoded`, async () => {
test(`returns buffer content base64 encoded`, async () => {
await mockPngExportType.runTask(
'pngJobId',
getBasePayload({
@ -119,9 +116,28 @@ test(`returns content of generatePng getBuffer base64 encoded`, async () => {
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(content).toEqual(testContent);
});
test(`screenshotting plugin uses the logger provided by the PNG export-type`, async () => {
const logSpy = jest.spyOn(mockLogger, 'get');
await mockPngExportType.runTask(
'pngJobId',
getBasePayload({
layout: { dimensions: {} },
locatorParams: [{ version: 'test', id: 'test' }] as LocatorParams[],
headers: encryptedHeaders,
}),
taskInstanceFields,
cancellationToken,
stream
);
expect(logSpy).toHaveBeenCalledWith('screenshotting');
});


@ -31,7 +31,7 @@ import {
REPORTING_REDIRECT_LOCATOR_STORE_KEY,
REPORTING_TRANSACTION_TYPE,
} from '@kbn/reporting-common';
import type { TaskRunResult } from '@kbn/reporting-common/types';
import type { TaskInstanceFields, TaskRunResult } from '@kbn/reporting-common/types';
import {
JobParamsPNGV2,
PNG_JOB_TYPE_V2,
@ -83,6 +83,7 @@ export class PngExportType extends ExportType<JobParamsPNGV2, TaskPayloadPNGV2>
public runTask = (
jobId: string,
payload: TaskPayloadPNGV2,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
) => {
@ -106,13 +107,9 @@ export class PngExportType extends ExportType<JobParamsPNGV2, TaskPayloadPNGV2>
apmGetAssets?.end();
apmGeneratePng = apmTrans.startSpan('generate-png-pipeline', 'execute');
const options = {
headers,
browserTimezone: payload.browserTimezone,
layout: { ...payload.layout, id: 'preserve_layout' as const },
};
if (!options.layout?.dimensions) {
const layout = { ...payload.layout, id: 'preserve_layout' as const };
if (!layout.dimensions) {
throw new Error(`LayoutParams.Dimensions is undefined.`);
}
@ -122,9 +119,12 @@ export class PngExportType extends ExportType<JobParamsPNGV2, TaskPayloadPNGV2>
return this.startDeps
.screenshotting!.getScreenshots({
format: 'png',
browserTimezone: payload.browserTimezone,
headers,
layout: { ...payload.layout, id: 'preserve_layout' },
layout,
urls: [[url, { [REPORTING_REDIRECT_LOCATOR_STORE_KEY]: locatorParams }]],
taskInstanceFields,
logger,
})
.pipe(
tap(({ metrics }) => {


@ -12,7 +12,12 @@ import type { TypeOf } from '@kbn/config-schema';
import type { CustomRequestHandlerContext } from '@kbn/core-http-request-handler-context-server';
import type { KibanaRequest } from '@kbn/core-http-server';
import type { CancellationToken } from '@kbn/reporting-common';
import type { BaseParams, BasePayload, TaskRunResult } from '@kbn/reporting-common/types';
import type {
BaseParams,
BasePayload,
TaskInstanceFields,
TaskRunResult,
} from '@kbn/reporting-common/types';
import { ConfigSchema } from './config_schema';
import type { ExportType } from './export_type';
@ -38,6 +43,7 @@ export type CreateJobFn<JobParamsType = BaseParams, JobPayloadType = BasePayload
export type RunTaskFn<TaskPayloadType = BasePayload> = (
jobId: string,
payload: TaskPayloadType,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
) => Promise<TaskRunResult>;


@ -5,10 +5,10 @@
* 2.0.
*/
import { lastValueFrom } from 'rxjs';
import { schema } from '@kbn/config-schema';
import type { CoreSetup, Plugin } from '@kbn/core/server';
import type { ScreenshottingStart } from '@kbn/screenshotting-plugin/server';
import { lastValueFrom } from 'rxjs';
import { API_ENDPOINT, ScreenshottingExpressionResponse } from '../common';
interface StartDeps {
@ -28,11 +28,12 @@ export class ScreenshottingExamplePlugin implements Plugin<void, void> {
}),
},
},
async (context, request, response) => {
async (_context, request, response) => {
const [, { screenshotting }] = await getStartServices();
const { metrics, results } = await lastValueFrom(
screenshotting.getScreenshots({
request,
taskInstanceFields: { startedAt: null, retryAt: null },
expression: request.query.expression,
})
);


@ -26,6 +26,7 @@ import type {
ExecutionError,
ReportDocument,
ReportOutput,
TaskInstanceFields,
TaskRunResult,
} from '@kbn/reporting-common/types';
import type { ReportingConfigType } from '@kbn/reporting-server';
@ -38,16 +39,16 @@ import { throwRetryableError } from '@kbn/task-manager-plugin/server';
import {
REPORTING_EXECUTE_TYPE,
TIME_BETWEEN_ATTEMPTS,
ReportTaskParams,
ReportingTask,
ReportingTaskStatus,
TIME_BETWEEN_ATTEMPTS,
} from '.';
import { ExportTypesRegistry, getContentStream } from '..';
import type { ReportingCore } from '../..';
import {
mapToReportingError,
isExecutionError,
mapToReportingError,
} from '../../../common/errors/map_to_reporting_error';
import type { ReportingStore } from '../store';
import { Report, SavedReport } from '../store';
@ -294,6 +295,7 @@ export class ExecuteReportTask implements ReportingTask {
public async _performJob(
task: ReportTaskParams,
taskInstanceFields: TaskInstanceFields,
cancellationToken: CancellationToken,
stream: Writable
): Promise<TaskRunResult> {
@ -306,9 +308,9 @@ export class ExecuteReportTask implements ReportingTask {
// if workerFn doesn't finish before timeout, call the cancellationToken and throw an error
const queueTimeout = durationToNumber(this.config.queue.timeout);
return Rx.lastValueFrom(
Rx.from(exportType.runTask(task.id, task.payload, cancellationToken, stream)).pipe(
timeout(queueTimeout)
) // throw an error if a value is not emitted before timeout
Rx.from(
exportType.runTask(task.id, task.payload, taskInstanceFields, cancellationToken, stream)
).pipe(timeout(queueTimeout)) // throw an error if a value is not emitted before timeout
);
}
@ -352,6 +354,12 @@ export class ExecuteReportTask implements ReportingTask {
return ({ taskInstance }: RunContext) => {
let jobId: string;
const cancellationToken = new CancellationToken();
const {
attempts: taskAttempts,
params: reportTaskParams,
retryAt: taskRetryAt,
startedAt: taskStartedAt,
} = taskInstance;
return {
/*
@ -363,11 +371,10 @@ export class ExecuteReportTask implements ReportingTask {
*/
run: async () => {
let report: SavedReport | undefined;
const isLastAttempt = taskInstance.attempts >= this.getMaxAttempts();
const isLastAttempt = taskAttempts >= this.getMaxAttempts();
// find the job in the store and set status to processing
const task = taskInstance.params as ReportTaskParams;
const task = reportTaskParams as ReportTaskParams;
jobId = task?.id;
try {
@ -430,7 +437,12 @@ export class ExecuteReportTask implements ReportingTask {
eventLog.logExecutionStart();
const output = await Promise.race<TaskRunResult>([
this._performJob(task, cancellationToken, stream),
this._performJob(
task,
{ retryAt: taskRetryAt, startedAt: taskStartedAt },
cancellationToken,
stream
),
this.throwIfKibanaShutsDown(),
]);


@ -5,6 +5,8 @@
* 2.0.
*/
import type { ConcreteTaskInstance } from '@kbn/task-manager-plugin/server';
/**
* Collected performance metrics during a screenshotting session.
*/
@ -29,3 +31,9 @@ export interface PerformanceMetrics {
*/
memoryInMegabytes?: number;
}
/**
* Timestamp metrics about a running screenshot task
* which determine the maximum timeouts possible
*/
export type TaskInstanceFields = Pick<ConcreteTaskInstance, 'retryAt' | 'startedAt'>;


@ -66,6 +66,7 @@ describe('Screenshot Observable Pipeline', () => {
headers: {},
layout: {},
urls: ['/welcome/home/start/index.htm'],
taskInstanceFields: { startedAt: null, retryAt: null },
};
config = {
enabled: true,


@ -7,9 +7,10 @@
import type { KibanaRequest } from '@kbn/core/server';
import type { ExpressionAstExpression } from '@kbn/expressions-plugin/common';
import { Logger } from '@kbn/logging';
import type { Optional } from '@kbn/utility-types';
import { LayoutParams } from '../../common';
import { PerformanceMetrics } from '../../common/types';
import { PerformanceMetrics, TaskInstanceFields } from '../../common/types';
import {
PdfScreenshotOptions,
PdfScreenshotResult,
@ -21,6 +22,10 @@ import type { ScreenshotObservableOptions, ScreenshotObservableResult } from './
export type { ScreenshotObservableResult, UrlOrUrlWithContext } from './observable';
export interface CaptureOptions extends Optional<ScreenshotObservableOptions, 'urls'> {
/**
* Timestamp metrics about the task lifecycle which are important for calculating timeouts
*/
taskInstanceFields: TaskInstanceFields;
/**
* Expression to render. Mutually exclusive with `urls`.
*/
@ -37,6 +42,10 @@ export interface CaptureOptions extends Optional<ScreenshotObservableOptions, 'u
* Source Kibana request object from where the headers will be extracted.
*/
request?: KibanaRequest;
/**
* Optional logger object that could contain context from the caller for traceability
*/
logger?: Logger;
}
export type CaptureMetrics = PerformanceMetrics;


@ -118,6 +118,7 @@ describe('class Screenshots', () => {
format: 'png',
layout: { id: 'preserve_layout' },
urls: ['/app/home/test'],
taskInstanceFields: { startedAt: null, retryAt: null },
};
const observe = screenshotsInstance.getScreenshots(options);
@ -151,6 +152,7 @@ describe('class Screenshots', () => {
format: 'png',
layout: { id: 'preserve_layout' },
urls: ['/app/home/test'],
taskInstanceFields: { startedAt: null, retryAt: null },
};
const observe = screenshotsInstance.getScreenshots(options);


@ -5,12 +5,10 @@
* 2.0.
*/
import type { CloudSetup } from '@kbn/cloud-plugin/server';
import type { HttpServiceSetup, Logger, PackageInfo } from '@kbn/core/server';
import { Semaphore } from '@kbn/std';
import ipaddr from 'ipaddr.js';
import { defaultsDeep, sum } from 'lodash';
import { from, Observable, of, throwError } from 'rxjs';
import moment from 'moment';
import { Observable, from, of, throwError } from 'rxjs';
import {
catchError,
concatMap,
@ -22,12 +20,17 @@ import {
tap,
toArray,
} from 'rxjs/operators';
import type { CloudSetup } from '@kbn/cloud-plugin/server';
import type { HttpServiceSetup, Logger, PackageInfo } from '@kbn/core/server';
import { Semaphore } from '@kbn/std';
import { CaptureResult, ScreenshotOptions, ScreenshotResult } from '.';
import {
errors,
SCREENSHOTTING_APP_ID,
SCREENSHOTTING_EXPRESSION,
SCREENSHOTTING_EXPRESSION_INPUT,
errors,
} from '../../common';
import { HeadlessChromiumDriverFactory } from '../browsers';
import { systemHasInsufficientMemory } from '../cloud';
@ -41,7 +44,7 @@ import {
toPdf,
toPng,
} from '../formats';
import { createLayout, Layout } from '../layouts';
import { Layout, createLayout } from '../layouts';
import { EventLogger, Transactions } from './event_logger';
import type { ScreenshotObservableOptions } from './observable';
import { ScreenshotObservableHandler, UrlOrUrlWithContext } from './observable';
@ -68,7 +71,8 @@ export class Screenshots {
private captureScreenshots(
eventLogger: EventLogger,
layout: Layout,
options: ScreenshotObservableOptions
options: ScreenshotObservableOptions,
logger: Logger
): Observable<CaptureResult> {
const { browserTimezone } = options;
@ -79,7 +83,7 @@ export class Screenshots {
openUrlTimeout: durationToNumber(this.config.capture.timeouts.openUrl),
defaultViewport: { width: layout.width, deviceScaleFactor: layout.getBrowserZoom() },
},
this.logger
logger
)
.pipe(
this.semaphore.acquire(),
@ -98,7 +102,7 @@ export class Screenshots {
catchError((error) => {
screen.checkPageIsOpen(); // this fails the job if the browser has closed
this.logger.error(error);
logger.error(error);
eventLogger.error(error, Transactions.SCREENSHOTTING);
return of({ ...DEFAULT_SETUP_RESULT, error }); // allow "as-is" screenshot with injected warning message
}),
@ -174,17 +178,28 @@ export class Screenshots {
getScreenshots(options: PdfScreenshotOptions): Observable<PdfScreenshotResult>;
getScreenshots(options: ScreenshotOptions): Observable<ScreenshotResult>;
getScreenshots(options: ScreenshotOptions): Observable<ScreenshotResult> {
const logger = options.logger?.get('screenshotting') ?? this.logger;
const { taskInstanceFields, format, layout } = options;
const { startedAt, retryAt } = taskInstanceFields;
if (startedAt) {
logger.debug(
`Task started at: ${startedAt && moment(startedAt).format()}.` +
` Can run until: ${retryAt && moment(retryAt).format()}`
);
}
if (this.systemHasInsufficientMemory()) {
return throwError(() => new errors.InsufficientMemoryAvailableOnCloudError());
}
const eventLogger = new EventLogger(this.logger, this.config);
const eventLogger = new EventLogger(logger, this.config);
const transactionEnd = eventLogger.startTransaction(Transactions.SCREENSHOTTING);
const layout = createLayout(options.layout ?? {});
const layoutInstance = createLayout(layout ?? {});
const captureOptions = this.getCaptureOptions(options);
return this.captureScreenshots(eventLogger, layout, captureOptions).pipe(
return this.captureScreenshots(eventLogger, layoutInstance, captureOptions, logger).pipe(
tap(({ results, metrics }) => {
transactionEnd({
labels: {
@ -196,9 +211,9 @@ export class Screenshots {
});
}),
mergeMap((result) => {
switch (options.format) {
switch (format) {
case 'pdf':
return toPdf(eventLogger, this.packageInfo, layout, options, result);
return toPdf(eventLogger, this.packageInfo, layoutInstance, options, result);
default:
return toPng(result);
}


@ -26,6 +26,7 @@
"@kbn/logging-mocks",
"@kbn/core-http-server",
"@kbn/core-plugins-server",
"@kbn/task-manager-plugin",
],
"exclude": [
"target/**/*",