Use Data Stream for Reporting storage (#176022)
## Summary

Closes https://github.com/elastic/kibana/issues/161608

* [x] Depends on https://github.com/elastic/elasticsearch/pull/97765
* [x] Depends on https://github.com/elastic/elasticsearch/pull/107581
* [x] Add a test that creates a new report job and checks the details of the templated data stream.
* [x] Run Discover tests in Flaky Test Runner: https://buildkite.com/elastic/kibana-flaky-test-suite-runner/builds/5999

## Release Note

Reporting internal storage has been changed from using regular indices to a data stream configuration for a more efficient sharding strategy. This change is not expected to have any impact on users.

## Screenshots

### Upgrade test (manual process)

Using a report generated before this change, and a report generated after "upgrading": even though the two reports are in different types of storage, they are still managed by the same policy. Looking at the details of the policy shows how the different types of storage are used.

### Log lines

Initial startup in a clean environment:

```
[2024-05-13T13:22:49.138-07:00][INFO ][plugins.reporting.store] Creating ILM policy for reporting data stream: kibana-reporting
[2024-05-13T13:22:53.337-07:00][INFO ][plugins.reporting.store] Linking ILM policy to reporting data stream: .kibana-reporting, component template: kibana-reporting@custom
```

Kibana restart with ES running continuously:

```
[2024-05-13T13:24:32.733-07:00][DEBUG][plugins.reporting.store] Found ILM policy kibana-reporting; skipping creation.
[2024-05-13T13:24:32.733-07:00][INFO ][plugins.reporting.store] Linking ILM policy to reporting data stream: .kibana-reporting, component template: kibana-reporting@custom
```

### Checklist

- [x] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials
- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios
- [ ] [Flaky Test Runner](https://ci-stats.kibana.dev/trigger_flaky_test_runner/1) was used on any tests changed ~~See https://buildkite.com/elastic/kibana-flaky-test-suite-runner/builds/5302 (internal link)~~
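The log lines above correspond to two idempotent startup steps: creating the `kibana-reporting` ILM policy and linking it to the `.kibana-reporting` data stream through the `kibana-reporting@custom` component template. A minimal standalone sketch of that linking step, assuming a plain Elasticsearch client and a local node URL rather than Kibana's internal scoped client, could look like this:

```ts
import { Client } from '@elastic/elasticsearch';

// Assumption: a plain client against a local cluster, for illustration only.
// Inside Kibana this is the plugin's scoped ElasticsearchClient.
const client = new Client({ node: 'http://localhost:9200' });

async function linkReportingIlmPolicy() {
  // Override the @custom component template so new backing indices of the
  // .kibana-reporting data stream pick up the kibana-reporting ILM policy.
  await client.cluster.putComponentTemplate({
    name: 'kibana-reporting@custom',
    template: { settings: { lifecycle: { name: 'kibana-reporting' } } },
    create: false,
  });

  // If the data stream already has backing indices, update their settings too.
  const backingIndicesExist = await client.indices.exists({
    index: '.kibana-reporting',
    expand_wildcards: ['hidden'],
  });
  if (backingIndicesExist) {
    await client.indices.putSettings({
      index: '.kibana-reporting',
      settings: { lifecycle: { name: 'kibana-reporting' } },
    });
  }
}

linkReportingIlmPolicy().catch(console.error);
```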
Parent: 3fc2b895b0 · Commit: 56383ccdde
38 changed files with 409 additions and 444 deletions
@@ -70,7 +70,9 @@ reports, you might need to change the following settings.
If capturing a report fails for any reason, {kib} will re-queue the report job for retry, as many times as this setting. Defaults to `3`.

`xpack.reporting.queue.indexInterval`::
-How often the index that stores reporting jobs rolls over to a new index. Valid values are `year`, `month`, `week`, `day`, and `hour`. Defaults to `week`.
+deprecated:[8.15.0,This setting has no effect.] How often Reporting creates a new index to store report jobs and file contents.
+Valid values are `year`, `month`, `week`, `day`, and `hour`. Defaults to `week`.
+*NOTE*: This setting exists for backwards compatibility, but is unused. Use the built-in ILM policy provided for the reporting plugin to customize the rollover of Reporting data.

[[xpack-reportingQueue-pollEnabled]] `xpack.reporting.queue.pollEnabled` ::
When `true`, enables the {kib} instance to poll {es} for pending jobs and claim them for
@@ -67,10 +67,9 @@ NOTE: When you create a dashboard report that includes a data table or saved sea

. To view and manage reports, open the main menu, then click *Stack Management > Reporting*.

-NOTE: Reports are stored in {es} and managed by the `kibana-reporting` {ilm}
-({ilm-init}) policy. By default, the policy stores reports forever. To learn
-more about {ilm-init} policies, refer to the {es}
-{ref}/index-lifecycle-management.html[{ilm-init} documentation].
+NOTE: In "stateful" deployments, reports are stored in {es} and managed by the `kibana-reporting` {ilm}
+({ilm-init}) policy. By default, the policy stores reports forever. To learn more about {ilm-init} policies, refer
+to the {es} {ref}/index-lifecycle-management.html[{ilm-init} documentation].

[float]
[[csv-limitations]]
@@ -36,4 +36,4 @@ An example response for a successfully queued report:
---------------------------------------------------------

<1> The relative path on the {kib} host for downloading the report.
-<2> (Not included in the example) Internal representation of the reporting job, as found in the `.reporting-*` index.
+<2> (Not included in the example) Internal representation of the reporting job, as found in the `.reporting-*` storage.
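The documentation hunks above describe reports being stored in {es} and managed by the `kibana-reporting` ILM policy. As a quick way to verify that on a running cluster, the data stream introduced by this PR can be inspected directly. The following is a minimal sketch (not part of this PR), again assuming a plain local Elasticsearch client:

```ts
import { Client } from '@elastic/elasticsearch';

// Assumption: plain local client; inside Kibana this would be a scoped ElasticsearchClient.
const client = new Client({ node: 'http://localhost:9200' });

async function showReportingLifecycle() {
  // The .kibana-reporting data stream is hidden, but can be fetched by name.
  const { data_streams: dataStreams } = await client.indices.getDataStream({
    name: '.kibana-reporting',
  });

  for (const ds of dataStreams) {
    // ilm_policy is expected to be "kibana-reporting" once the policy is linked.
    console.log(`${ds.name}: ilm_policy=${ds.ilm_policy}, backing indices=${ds.indices.length}`);
  }
}

showReportingLifecycle().catch(console.error);
```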
@@ -55,6 +55,7 @@ export const REPORTING_MANAGEMENT_HOME = '/app/management/insightsAndAlerting/re
 * ILM
 */

+// The ILM policy manages stored reports only in stateful deployments.
export const ILM_POLICY_NAME = 'kibana-reporting';

/*
@@ -149,6 +149,7 @@ export interface ReportSource {
  migration_version: string; // for reminding the user to update their POST URL
  attempts: number; // initially populated as 0
  created_at: string; // timestamp in UTC
+  '@timestamp'?: string; // creation timestamp, only used for data streams compatibility
  status: JOB_STATUS;

  /*
@@ -29,12 +29,6 @@ export interface LocatorParams<P extends SerializableRecord = SerializableRecord
  params: P;
}

-export type IlmPolicyMigrationStatus = 'policy-not-found' | 'indices-not-managed-by-policy' | 'ok';
-
-export interface IlmPolicyStatusResponse {
-  status: IlmPolicyMigrationStatus;
-}
-
type Url = string;
type UrlLocatorTuple = [url: Url, locatorParams: LocatorParams];
@@ -12,7 +12,16 @@ export const PLUGIN_ID = 'reporting';
 * Storage
 */

export const REPORTING_SYSTEM_INDEX = '.reporting';
+// Used to index new documents
+export const REPORTING_DATA_STREAM_ALIAS = '.kibana-reporting';
+// Used to retrieve settings
+export const REPORTING_DATA_STREAM_WILDCARD = '.kibana-reporting*';
+// Index pattern of plain indices before Reporting used Data Stream storage
+export const REPORTING_LEGACY_INDICES = '.reporting-*';
+// Used to search for all reports and check for managing privileges
+export const REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY = '.reporting-*,.kibana-reporting*';
+// Name of component template which Kibana overrides for lifecycle settings
+export const REPORTING_DATA_STREAM_COMPONENT_TEMPLATE = 'kibana-reporting@custom';

/*
 * Telemetry
@@ -24,6 +24,7 @@ export const config: PluginConfigDescriptor<ReportingConfigType> = {
  },
  schema: ConfigSchema,
  deprecations: ({ unused }) => [
+    unused('queue.indexInterval', { level: 'warning' }), // unused since 8.15
    unused('capture.browser.chromium.maxScreenshotDimension', { level: 'warning' }), // unused since 7.8
    unused('capture.browser.type', { level: 'warning' }),
    unused('poll.jobCompletionNotifier.intervalErrorMultiplier', { level: 'warning' }), // unused since 7.10
@@ -20,7 +20,7 @@ export const registerDeprecations = ({
  core.deprecations.registerDeprecations({
    getDeprecations: async (ctx) => {
      return [
-        ...(await getIlmPolicyDeprecationsInfo(ctx, { reportingCore })),
+        ...(await getIlmPolicyDeprecationsInfo(ctx)),
        ...(await getReportingRoleDeprecationsInfo(ctx, { reportingCore })),
      ];
    },
@@ -7,10 +7,6 @@

import type { GetDeprecationsContext } from '@kbn/core/server';
import { elasticsearchServiceMock, savedObjectsClientMock } from '@kbn/core/server/mocks';
-import { createMockConfigSchema } from '@kbn/reporting-mocks-server';
-
-import { ReportingCore } from '../core';
-import { createMockReportingCore } from '../test_helpers';

import { getDeprecationsInfo } from './migrate_existing_indices_ilm_policy';

@@ -21,12 +17,10 @@ type ScopedClusterClientMock = ReturnType<
describe("Migrate existing indices' ILM policy deprecations", () => {
  let esClient: ScopedClusterClientMock;
  let deprecationsCtx: GetDeprecationsContext;
-  let reportingCore: ReportingCore;

  beforeEach(async () => {
    esClient = elasticsearchServiceMock.createScopedClusterClient();
    deprecationsCtx = { esClient, savedObjectsClient: savedObjectsClientMock.create() };
-    reportingCore = await createMockReportingCore(createMockConfigSchema());
  });

  const createIndexSettings = (lifecycleName: string) => ({

@@ -47,7 +41,7 @@ describe("Migrate existing indices' ILM policy deprecations", () => {
      indexB: createIndexSettings('kibana-reporting'),
    });

-    expect(await getDeprecationsInfo(deprecationsCtx, { reportingCore })).toMatchInlineSnapshot(`
+    expect(await getDeprecationsInfo(deprecationsCtx)).toMatchInlineSnapshot(`
      Array [
        Object {
          "correctiveActions": Object {

@@ -60,7 +54,7 @@ describe("Migrate existing indices' ILM policy deprecations", () => {
          ],
        },
        "level": "warning",
-        "message": "New reporting indices will be managed by the \\"kibana-reporting\\" provisioned ILM policy. You must edit this policy to manage the report lifecycle. This change targets all indices prefixed with \\".reporting-*\\".",
+        "message": "New reporting indices will be managed by the \\"kibana-reporting\\" provisioned ILM policy. You must edit this policy to manage the report lifecycle. This change targets the hidden system index pattern \\".kibana-reporting*\\".",
        "title": "Found reporting indices managed by custom ILM policy.",
      },
    ]

@@ -73,14 +67,10 @@ describe("Migrate existing indices' ILM policy deprecations", () => {
      indexB: createIndexSettings('kibana-reporting'),
    });

-    expect(await getDeprecationsInfo(deprecationsCtx, { reportingCore })).toMatchInlineSnapshot(
-      `Array []`
-    );
+    expect(await getDeprecationsInfo(deprecationsCtx)).toMatchInlineSnapshot(`Array []`);

    esClient.asInternalUser.indices.getSettings.mockResponse({});

-    expect(await getDeprecationsInfo(deprecationsCtx, { reportingCore })).toMatchInlineSnapshot(
-      `Array []`
-    );
+    expect(await getDeprecationsInfo(deprecationsCtx)).toMatchInlineSnapshot(`Array []`);
  });
});
@@ -8,24 +8,14 @@
import { DeprecationsDetails, GetDeprecationsContext } from '@kbn/core/server';
import { i18n } from '@kbn/i18n';
import { ILM_POLICY_NAME, INTERNAL_ROUTES } from '@kbn/reporting-common';
-import { ReportingCore } from '../core';
-import { deprecations } from '../lib/deprecations';
+import { REPORTING_DATA_STREAM_WILDCARD } from '@kbn/reporting-server';
+import { IlmPolicyManager } from '../lib/store';

-interface ExtraDependencies {
-  reportingCore: ReportingCore;
-}
-
-export const getDeprecationsInfo = async (
-  { esClient }: GetDeprecationsContext,
-  { reportingCore }: ExtraDependencies
-): Promise<DeprecationsDetails[]> => {
-  const store = await reportingCore.getStore();
-  const indexPattern = store.getReportingIndexPattern();
-
-  const migrationStatus = await deprecations.checkIlmMigrationStatus({
-    reportingCore,
-    elasticsearchClient: esClient.asInternalUser,
-  });
+export const getDeprecationsInfo = async ({
+  esClient,
+}: GetDeprecationsContext): Promise<DeprecationsDetails[]> => {
+  const ilmPolicyManager = IlmPolicyManager.create({ client: esClient.asInternalUser });
+  const migrationStatus = await ilmPolicyManager.checkIlmMigrationStatus();

  if (migrationStatus !== 'ok') {
    return [

@@ -35,10 +25,10 @@ export const getDeprecationsInfo = async (
      }),
      level: 'warning',
      message: i18n.translate('xpack.reporting.deprecations.migrateIndexIlmPolicyActionMessage', {
-        defaultMessage: `New reporting indices will be managed by the "{reportingIlmPolicy}" provisioned ILM policy. You must edit this policy to manage the report lifecycle. This change targets all indices prefixed with "{indexPattern}".`,
+        defaultMessage: `New reporting indices will be managed by the "{reportingIlmPolicy}" provisioned ILM policy. You must edit this policy to manage the report lifecycle. This change targets the hidden system index pattern "{indexPattern}".`,
        values: {
          reportingIlmPolicy: ILM_POLICY_NAME,
-          indexPattern,
+          indexPattern: REPORTING_DATA_STREAM_WILDCARD,
        },
      }),
      correctiveActions: {
@ -122,12 +122,12 @@ describe('ContentStream', () => {
|
|||
'body.query.constant_score.filter.bool.must.0.term._id',
|
||||
'something'
|
||||
);
|
||||
expect(request2).toHaveProperty('index', 'somewhere');
|
||||
expect(request2).toHaveProperty('index', '.reporting-*,.kibana-reporting*');
|
||||
expect(request2).toHaveProperty(
|
||||
'body.query.constant_score.filter.bool.must.0.term.parent_id',
|
||||
'something'
|
||||
);
|
||||
expect(request3).toHaveProperty('index', 'somewhere');
|
||||
expect(request3).toHaveProperty('index', '.reporting-*,.kibana-reporting*');
|
||||
expect(request3).toHaveProperty(
|
||||
'body.query.constant_score.filter.bool.must.0.term.parent_id',
|
||||
'something'
|
||||
|
@ -293,8 +293,11 @@ describe('ContentStream', () => {
|
|||
1,
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
index: 'somewhere',
|
||||
index: '.kibana-reporting',
|
||||
op_type: 'create',
|
||||
refresh: 'wait_for',
|
||||
body: {
|
||||
'@timestamp': '1970-01-01T00:00:00.000Z',
|
||||
parent_id: 'something',
|
||||
output: {
|
||||
content: '34',
|
||||
|
@ -307,8 +310,11 @@ describe('ContentStream', () => {
|
|||
2,
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
index: 'somewhere',
|
||||
index: '.kibana-reporting',
|
||||
op_type: 'create',
|
||||
refresh: 'wait_for',
|
||||
body: {
|
||||
'@timestamp': '1970-01-01T00:00:00.000Z',
|
||||
parent_id: 'something',
|
||||
output: {
|
||||
content: '56',
|
||||
|
@ -335,9 +341,12 @@ describe('ContentStream', () => {
|
|||
1,
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
index: 'somewhere',
|
||||
index: '.kibana-reporting',
|
||||
op_type: 'create',
|
||||
refresh: 'wait_for',
|
||||
body: {
|
||||
parent_id: 'something',
|
||||
'@timestamp': '1970-01-01T00:00:00.000Z',
|
||||
output: {
|
||||
content: Buffer.from('456').toString('base64'),
|
||||
chunk: 1,
|
||||
|
@ -349,9 +358,12 @@ describe('ContentStream', () => {
|
|||
2,
|
||||
expect.objectContaining({
|
||||
id: expect.any(String),
|
||||
index: 'somewhere',
|
||||
index: '.kibana-reporting',
|
||||
op_type: 'create',
|
||||
refresh: 'wait_for',
|
||||
body: {
|
||||
parent_id: 'something',
|
||||
'@timestamp': '1970-01-01T00:00:00.000Z',
|
||||
output: {
|
||||
content: Buffer.from('78').toString('base64'),
|
||||
chunk: 2,
|
||||
|
|
|
@ -8,9 +8,13 @@
|
|||
import { Duplex } from 'stream';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
|
||||
import type * as estypes from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import type { ElasticsearchClient, Logger } from '@kbn/core/server';
|
||||
import type { ReportSource } from '@kbn/reporting-common/types';
|
||||
import {
|
||||
REPORTING_DATA_STREAM_ALIAS,
|
||||
REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY,
|
||||
} from '@kbn/reporting-server';
|
||||
import type { ReportingCore } from '..';
|
||||
|
||||
const ONE_MB = 1024 * 1024;
|
||||
|
@ -31,6 +35,7 @@ interface ChunkOutput {
|
|||
}
|
||||
|
||||
interface ChunkSource {
|
||||
'@timestamp': string;
|
||||
parent_id: string;
|
||||
output: ChunkOutput;
|
||||
}
|
||||
|
@ -90,7 +95,7 @@ export class ContentStream extends Duplex {
|
|||
|
||||
private async readHead() {
|
||||
const { id, index } = this.document;
|
||||
const body: SearchRequest['body'] = {
|
||||
const body: SearchRequest = {
|
||||
_source: { includes: ['output.content', 'output.size', 'jobtype'] },
|
||||
query: {
|
||||
constant_score: {
|
||||
|
@ -110,13 +115,14 @@ export class ContentStream extends Duplex {
|
|||
const hits = response?.hits?.hits?.[0];
|
||||
|
||||
this.jobSize = hits?._source?.output?.size;
|
||||
this.logger.debug(`Reading job of size ${this.jobSize}`);
|
||||
|
||||
return hits?._source?.output?.content;
|
||||
}
|
||||
|
||||
private async readChunk() {
|
||||
const { id, index } = this.document;
|
||||
const body: SearchRequest['body'] = {
|
||||
const { id } = this.document;
|
||||
const body: SearchRequest = {
|
||||
_source: { includes: ['output.content'] },
|
||||
query: {
|
||||
constant_score: {
|
||||
|
@ -132,7 +138,10 @@ export class ContentStream extends Duplex {
|
|||
|
||||
this.logger.debug(`Reading chunk #${this.chunksRead}.`);
|
||||
|
||||
const response = await this.client.search<ChunkSource>({ body, index });
|
||||
const response = await this.client.search<ChunkSource>({
|
||||
body,
|
||||
index: REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY,
|
||||
});
|
||||
const hits = response?.hits?.hits?.[0];
|
||||
|
||||
return hits?._source?.output.content;
|
||||
|
@ -179,10 +188,11 @@ export class ContentStream extends Duplex {
|
|||
}
|
||||
|
||||
private async writeHead(content: string) {
|
||||
this.logger.debug(`Updating report contents.`);
|
||||
this.logger.debug(`Updating chunk #0 (${this.document.id}).`);
|
||||
|
||||
const body = await this.client.update<ReportSource>({
|
||||
...this.document,
|
||||
refresh: 'wait_for',
|
||||
body: {
|
||||
doc: {
|
||||
output: { content },
|
||||
|
@ -194,16 +204,19 @@ export class ContentStream extends Duplex {
|
|||
}
|
||||
|
||||
private async writeChunk(content: string) {
|
||||
const { id: parentId, index } = this.document;
|
||||
const { id: parentId } = this.document;
|
||||
const id = uuidv4();
|
||||
|
||||
this.logger.debug(`Writing chunk #${this.chunksWritten} (${id}).`);
|
||||
|
||||
await this.client.index<ChunkSource>({
|
||||
id,
|
||||
index,
|
||||
index: REPORTING_DATA_STREAM_ALIAS,
|
||||
refresh: 'wait_for',
|
||||
op_type: 'create',
|
||||
body: {
|
||||
parent_id: parentId,
|
||||
'@timestamp': new Date(0).toISOString(), // required for data streams compatibility
|
||||
output: {
|
||||
content,
|
||||
chunk: this.chunksWritten,
|
||||
|
|
|
@@ -1,37 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { IlmPolicyMigrationStatus } from '@kbn/reporting-common/url';
-import { ILM_POLICY_NAME } from '@kbn/reporting-common';
-import { IlmPolicyManager } from '../store/ilm_policy_manager';
-import type { DeprecationsDependencies } from './types';
-
-export const checkIlmMigrationStatus = async ({
-  reportingCore,
-  elasticsearchClient,
-}: DeprecationsDependencies): Promise<IlmPolicyMigrationStatus> => {
-  const ilmPolicyManager = IlmPolicyManager.create({ client: elasticsearchClient });
-  if (!(await ilmPolicyManager.doesIlmPolicyExist())) {
-    return 'policy-not-found';
-  }
-
-  const store = await reportingCore.getStore();
-  const indexPattern = store.getReportingIndexPattern();
-
-  const reportingIndicesSettings = await elasticsearchClient.indices.getSettings({
-    index: indexPattern,
-  });
-
-  const hasUnmanagedIndices = Object.values(reportingIndicesSettings).some((settings) => {
-    return (
-      settings?.settings?.index?.lifecycle?.name !== ILM_POLICY_NAME &&
-      settings?.settings?.['index.lifecycle']?.name !== ILM_POLICY_NAME
-    );
-  });
-
-  return hasUnmanagedIndices ? 'indices-not-managed-by-policy' : 'ok';
-};
@@ -9,7 +9,6 @@ import { errors } from '@elastic/elasticsearch';
import Boom from '@hapi/boom';
import { i18n } from '@kbn/i18n';
import { DeprecationsDetails, DocLinksServiceSetup } from '@kbn/core/server';
-import { checkIlmMigrationStatus } from './check_ilm_migration_status';

function deprecationError(
  title: string,

@@ -83,7 +82,6 @@ function getDetailedErrorMessage(error: any): string {
}

export const deprecations = {
-  checkIlmMigrationStatus,
  deprecationError,
  getDetailedErrorMessage,
  getErrorStatusCode,
@@ -6,9 +6,7 @@
 */

import type { ElasticsearchClient } from '@kbn/core/server';
-import type { ReportingCore } from '../../core';

export interface DeprecationsDependencies {
-  reportingCore: ReportingCore;
  elasticsearchClient: ElasticsearchClient;
}
@@ -1,18 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import type { IlmPutLifecycleRequest } from '@elastic/elasticsearch/lib/api/typesWithBodyKey';
-
-export const reportingIlmPolicy: IlmPutLifecycleRequest['body'] = {
-  policy: {
-    phases: {
-      hot: {
-        actions: {},
-      },
-    },
-  },
-};
@ -5,12 +5,19 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { estypes } from '@elastic/elasticsearch';
|
||||
import type { ElasticsearchClient } from '@kbn/core/server';
|
||||
import { ILM_POLICY_NAME } from '@kbn/reporting-common';
|
||||
import { reportingIlmPolicy } from './constants';
|
||||
import { IlmPolicyMigrationStatus } from '@kbn/reporting-common/types';
|
||||
import {
|
||||
REPORTING_DATA_STREAM_ALIAS,
|
||||
REPORTING_DATA_STREAM_COMPONENT_TEMPLATE,
|
||||
REPORTING_DATA_STREAM_WILDCARD,
|
||||
REPORTING_LEGACY_INDICES,
|
||||
} from '@kbn/reporting-server';
|
||||
|
||||
/**
|
||||
* Responsible for detecting and provisioning the reporting ILM policy.
|
||||
* Responsible for detecting and provisioning the reporting ILM policy in stateful deployments.
|
||||
*
|
||||
* Uses the provided {@link ElasticsearchClient} to scope request privileges.
|
||||
*/
|
||||
|
@ -21,6 +28,9 @@ export class IlmPolicyManager {
|
|||
return new IlmPolicyManager(opts.client);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that the ILM policy exists
|
||||
*/
|
||||
public async doesIlmPolicyExist(): Promise<boolean> {
|
||||
try {
|
||||
await this.client.ilm.getLifecycle({ name: ILM_POLICY_NAME });
|
||||
|
@ -33,13 +43,95 @@ export class IlmPolicyManager {
|
|||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is automatically called on the Stack Management > Reporting page, by the `` API for users with
|
||||
* privilege to manage ILM, to notify them when attention is needed to update the policy for any reason.
|
||||
*/
|
||||
public async checkIlmMigrationStatus(): Promise<IlmPolicyMigrationStatus> {
|
||||
if (!(await this.doesIlmPolicyExist())) {
|
||||
return 'policy-not-found';
|
||||
}
|
||||
|
||||
const [reportingDataStreamSettings, reportingLegacyIndexSettings] = await Promise.all([
|
||||
this.client.indices.getSettings({
|
||||
index: REPORTING_DATA_STREAM_WILDCARD,
|
||||
}),
|
||||
this.client.indices.getSettings({
|
||||
index: REPORTING_LEGACY_INDICES,
|
||||
}),
|
||||
]);
|
||||
|
||||
const hasUnmanaged = (settings: estypes.IndicesIndexState) => {
|
||||
return (
|
||||
settings?.settings?.index?.lifecycle?.name !== ILM_POLICY_NAME &&
|
||||
settings?.settings?.['index.lifecycle']?.name !== ILM_POLICY_NAME
|
||||
);
|
||||
};
|
||||
|
||||
const hasUnmanagedDataStream = Object.values(reportingDataStreamSettings).some(hasUnmanaged);
|
||||
const hasUnmanagedIndices = Object.values(reportingLegacyIndexSettings).some(hasUnmanaged);
|
||||
|
||||
return hasUnmanagedDataStream || hasUnmanagedIndices ? 'indices-not-managed-by-policy' : 'ok';
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the Reporting ILM policy
|
||||
*/
|
||||
public async createIlmPolicy(): Promise<void> {
|
||||
await this.client.ilm.putLifecycle({
|
||||
name: ILM_POLICY_NAME,
|
||||
body: reportingIlmPolicy,
|
||||
policy: { phases: { hot: { actions: {} } } },
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the Data Stream index template with a link to the Reporting ILM policy
|
||||
*/
|
||||
public async linkIlmPolicy() {
|
||||
const putTemplateAcknowledged = await this.client.cluster.putComponentTemplate({
|
||||
name: REPORTING_DATA_STREAM_COMPONENT_TEMPLATE,
|
||||
template: { settings: { lifecycle: { name: ILM_POLICY_NAME } } },
|
||||
create: false,
|
||||
});
|
||||
|
||||
let backingIndicesAcknowledged: { acknowledged: boolean | null } = { acknowledged: null };
|
||||
const backingIndicesExist = await this.client.indices.exists({
|
||||
index: REPORTING_DATA_STREAM_ALIAS,
|
||||
expand_wildcards: ['hidden'],
|
||||
});
|
||||
if (backingIndicesExist) {
|
||||
backingIndicesAcknowledged = await this.client.indices.putSettings({
|
||||
index: REPORTING_DATA_STREAM_ALIAS,
|
||||
settings: { lifecycle: { name: ILM_POLICY_NAME } },
|
||||
});
|
||||
}
|
||||
|
||||
return { putTemplateResponse: putTemplateAcknowledged, backingIndicesAcknowledged };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update datastream to use ILM policy. If legacy indices exist, this attempts to link
|
||||
* the ILM policy to them as well.
|
||||
*/
|
||||
public async migrateIndicesToIlmPolicy() {
|
||||
const {
|
||||
putTemplateResponse: { acknowledged: putTemplateAcknowledged },
|
||||
backingIndicesAcknowledged: { acknowledged: backingIndicesAcknowledged },
|
||||
} = await this.linkIlmPolicy();
|
||||
|
||||
let legacyAcknowledged: boolean | null = null;
|
||||
const legacyExists = await this.client.indices.exists({
|
||||
index: REPORTING_LEGACY_INDICES,
|
||||
expand_wildcards: ['hidden'],
|
||||
});
|
||||
if (legacyExists) {
|
||||
const { acknowledged } = await this.client.indices.putSettings({
|
||||
index: REPORTING_LEGACY_INDICES,
|
||||
settings: { lifecycle: { name: ILM_POLICY_NAME } },
|
||||
});
|
||||
legacyAcknowledged = acknowledged;
|
||||
}
|
||||
|
||||
return { putTemplateAcknowledged, backingIndicesAcknowledged, legacyAcknowledged };
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -5,5 +5,4 @@
 * 2.0.
 */

-export { reportingIlmPolicy } from './constants';
export { IlmPolicyManager } from './ilm_policy_manager';
@ -1,93 +0,0 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
export const mapping = {
|
||||
meta: {
|
||||
// We are indexing these properties with both text and keyword fields
|
||||
// because that's what will be auto generated when an index already exists.
|
||||
properties: {
|
||||
// ID of the app this report: search, visualization or dashboard, etc
|
||||
objectType: {
|
||||
type: 'text',
|
||||
fields: {
|
||||
keyword: {
|
||||
type: 'keyword',
|
||||
ignore_above: 256,
|
||||
},
|
||||
},
|
||||
},
|
||||
layout: {
|
||||
type: 'text',
|
||||
fields: {
|
||||
keyword: {
|
||||
type: 'keyword',
|
||||
ignore_above: 256,
|
||||
},
|
||||
},
|
||||
},
|
||||
isDeprecated: {
|
||||
type: 'boolean',
|
||||
},
|
||||
},
|
||||
},
|
||||
migration_version: { type: 'keyword' }, // new field (7.14) to distinguish reports that were scheduled with Task Manager
|
||||
jobtype: { type: 'keyword' },
|
||||
payload: { type: 'object', enabled: false },
|
||||
priority: { type: 'byte' }, // TODO: remove: this is unused
|
||||
timeout: { type: 'long' },
|
||||
process_expiration: { type: 'date' },
|
||||
created_by: { type: 'keyword' }, // `null` if security is disabled
|
||||
created_at: { type: 'date' },
|
||||
started_at: { type: 'date' },
|
||||
completed_at: { type: 'date' },
|
||||
attempts: { type: 'short' },
|
||||
max_attempts: { type: 'short' },
|
||||
kibana_name: { type: 'keyword' },
|
||||
kibana_id: { type: 'keyword' },
|
||||
status: { type: 'keyword' },
|
||||
parent_id: { type: 'keyword' },
|
||||
output: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
error_code: { type: 'keyword' },
|
||||
chunk: { type: 'long' },
|
||||
content_type: { type: 'keyword' },
|
||||
size: { type: 'long' },
|
||||
content: { type: 'object', enabled: false },
|
||||
},
|
||||
},
|
||||
metrics: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
csv: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
rows: { type: 'long' },
|
||||
},
|
||||
},
|
||||
pdf: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
pages: { type: 'long' },
|
||||
cpu: { type: 'double' },
|
||||
cpuInPercentage: { type: 'double' },
|
||||
memory: { type: 'long' },
|
||||
memoryInMegabytes: { type: 'double' },
|
||||
},
|
||||
},
|
||||
png: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
cpu: { type: 'double' },
|
||||
cpuInPercentage: { type: 'double' },
|
||||
memory: { type: 'long' },
|
||||
memoryInMegabytes: { type: 'double' },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
} as const;
|
|
@@ -124,6 +124,9 @@ describe('Class Report', () => {

  it('throws error if converted to task JSON before being synced with ES storage', () => {
    const report = new Report({ jobtype: 'spam', payload: {} } as any);
+    // @ts-ignore null is not applicable to string
+    report._index = null;
+
    expect(() => report.updateWithEsDoc(report)).toThrowErrorMatchingInlineSnapshot(
      `"Report object from ES has missing fields!"`
    );
@@ -10,7 +10,8 @@ import moment from 'moment';
import { v4 as uuidv4 } from 'uuid';

import { JOB_STATUS } from '@kbn/reporting-common';
+import { REPORTING_DATA_STREAM_ALIAS } from '@kbn/reporting-server';
import {
  ReportApiJSON,
  ReportDocumentHead,

@@ -25,7 +26,7 @@ export const MIGRATION_VERSION = '7.14.0';
 * Class for an ephemeral report document: possibly is not saved in Elasticsearch
 */
export class Report implements Partial<ReportSource & ReportDocumentHead> {
-  public _index?: string;
+  public _index: string;
  public _id: string;
  public _primary_term?: number; // set by ES
  public _seq_no?: number; // set by ES

@@ -63,7 +64,7 @@ export class Report implements Partial<ReportSource & ReportDocumentHead> {
   */
  constructor(opts: Partial<ReportSource> & Partial<ReportDocumentHead>, fields?: ReportFields) {
    this._id = opts._id != null ? opts._id : uuidv4();
-    this._index = opts._index;
+    this._index = opts._index ?? REPORTING_DATA_STREAM_ALIAS; // Sets the value to the data stream, unless it's a stored report and we know the name of the backing index
    this._primary_term = opts._primary_term;
    this._seq_no = opts._seq_no;

@@ -167,7 +168,7 @@ export class Report implements Partial<ReportSource & ReportDocumentHead> {
  toApiJSON(): ReportApiJSON {
    return {
      id: this._id,
-      index: this._index!,
+      index: this._index ?? REPORTING_DATA_STREAM_ALIAS,
      kibana_name: this.kibana_name,
      kibana_id: this.kibana_id,
      jobtype: this.jobtype,
@ -80,70 +80,6 @@ describe('ReportingStore', () => {
|
|||
);
|
||||
});
|
||||
|
||||
it('handles error creating the index', async () => {
|
||||
// setup
|
||||
mockEsClient.indices.exists.mockResponse(false);
|
||||
mockEsClient.indices.create.mockRejectedValue(new Error('horrible error'));
|
||||
|
||||
const store = new ReportingStore(mockCore, mockLogger);
|
||||
const mockReport = new Report({
|
||||
_index: '.reporting-errortest',
|
||||
jobtype: 'unknowntype',
|
||||
payload: {},
|
||||
meta: {},
|
||||
} as any);
|
||||
await expect(store.addReport(mockReport)).rejects.toMatchInlineSnapshot(
|
||||
`[Error: horrible error]`
|
||||
);
|
||||
});
|
||||
|
||||
/* Creating the index will fail, if there were multiple jobs staged in
|
||||
* parallel and creation completed from another Kibana instance. Only the
|
||||
* first request in line can successfully create it.
|
||||
* In spite of that race condition, adding the new job in Elasticsearch is
|
||||
* fine.
|
||||
*/
|
||||
it('ignores index creation error if the index already exists and continues adding the report', async () => {
|
||||
// setup
|
||||
mockEsClient.indices.exists.mockResponse(false);
|
||||
mockEsClient.indices.create.mockRejectedValue(new Error('devastating error'));
|
||||
|
||||
const store = new ReportingStore(mockCore, mockLogger);
|
||||
const mockReport = new Report({
|
||||
_index: '.reporting-mock',
|
||||
jobtype: 'unknowntype',
|
||||
payload: {},
|
||||
meta: {},
|
||||
} as any);
|
||||
await expect(store.addReport(mockReport)).rejects.toMatchInlineSnapshot(
|
||||
`[Error: devastating error]`
|
||||
);
|
||||
});
|
||||
|
||||
it('skips creating the index if already exists', async () => {
|
||||
// setup
|
||||
mockEsClient.indices.exists.mockResponse(false);
|
||||
// will be triggered but ignored
|
||||
mockEsClient.indices.create.mockRejectedValue(new Error('resource_already_exists_exception'));
|
||||
|
||||
const store = new ReportingStore(mockCore, mockLogger);
|
||||
const mockReport = new Report({
|
||||
created_by: 'user1',
|
||||
jobtype: 'unknowntype',
|
||||
payload: {},
|
||||
meta: {},
|
||||
} as any);
|
||||
await expect(store.addReport(mockReport)).resolves.toMatchObject({
|
||||
_primary_term: undefined,
|
||||
_seq_no: undefined,
|
||||
attempts: 0,
|
||||
created_by: 'user1',
|
||||
jobtype: 'unknowntype',
|
||||
payload: {},
|
||||
status: 'pending',
|
||||
});
|
||||
});
|
||||
|
||||
it('allows username string to be `false`', async () => {
|
||||
// setup
|
||||
mockEsClient.indices.exists.mockResponse(false);
|
||||
|
@ -426,16 +362,14 @@ describe('ReportingStore', () => {
|
|||
expect(mockEsClient.ilm.getLifecycle).toHaveBeenCalledWith({ name: 'kibana-reporting' });
|
||||
expect(mockEsClient.ilm.putLifecycle.mock.calls[0][0]).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
"body": Object {
|
||||
"policy": Object {
|
||||
"phases": Object {
|
||||
"hot": Object {
|
||||
"actions": Object {},
|
||||
},
|
||||
"name": "kibana-reporting",
|
||||
"policy": Object {
|
||||
"phases": Object {
|
||||
"hot": Object {
|
||||
"actions": Object {},
|
||||
},
|
||||
},
|
||||
},
|
||||
"name": "kibana-reporting",
|
||||
}
|
||||
`);
|
||||
});
|
||||
|
|
|
@ -14,15 +14,16 @@ import type {
|
|||
ReportOutput,
|
||||
ReportSource,
|
||||
} from '@kbn/reporting-common/types';
|
||||
import { REPORTING_SYSTEM_INDEX } from '@kbn/reporting-server';
|
||||
import {
|
||||
REPORTING_DATA_STREAM_ALIAS,
|
||||
REPORTING_DATA_STREAM_COMPONENT_TEMPLATE,
|
||||
} from '@kbn/reporting-server';
|
||||
import moment from 'moment';
|
||||
import type { Report } from '.';
|
||||
import { SavedReport } from '.';
|
||||
import type { ReportingCore } from '../..';
|
||||
import type { ReportTaskParams } from '../tasks';
|
||||
import { IlmPolicyManager } from './ilm_policy_manager';
|
||||
import { indexTimestamp } from './index_timestamp';
|
||||
import { mapping } from './mapping';
|
||||
import { MIGRATION_VERSION } from './report';
|
||||
|
||||
type UpdateResponse<T> = estypes.UpdateResponse<T>;
|
||||
|
@ -71,6 +72,7 @@ const sourceDoc = (doc: Partial<ReportSource>): Partial<ReportSource> => {
|
|||
return {
|
||||
...doc,
|
||||
migration_version: MIGRATION_VERSION,
|
||||
'@timestamp': new Date(0).toISOString(), // required for data streams compatibility
|
||||
};
|
||||
};
|
||||
|
||||
|
@ -103,16 +105,9 @@ const jobDebugMessage = (report: Report) =>
|
|||
* - interface for downloading the report
|
||||
*/
|
||||
export class ReportingStore {
|
||||
private readonly indexPrefix: string; // config setting of index prefix in system index name
|
||||
private readonly indexInterval: string; // config setting of index prefix: how often to poll for pending work
|
||||
private client?: ElasticsearchClient;
|
||||
config: ReportingCore['config'];
|
||||
|
||||
constructor(private reportingCore: ReportingCore, private logger: Logger) {
|
||||
this.config = reportingCore.getConfig();
|
||||
|
||||
this.indexPrefix = REPORTING_SYSTEM_INDEX;
|
||||
this.indexInterval = this.config.queue.indexInterval;
|
||||
this.logger = logger.get('store');
|
||||
}
|
||||
|
||||
|
@ -124,62 +119,28 @@ export class ReportingStore {
|
|||
return this.client;
|
||||
}
|
||||
|
||||
private async getIlmPolicyManager() {
|
||||
private async createIlmPolicy() {
|
||||
const client = await this.getClient();
|
||||
return IlmPolicyManager.create({ client });
|
||||
}
|
||||
|
||||
private async createIndex(indexName: string) {
|
||||
const client = await this.getClient();
|
||||
const exists = await client.indices.exists({ index: indexName });
|
||||
|
||||
if (exists) {
|
||||
return exists;
|
||||
const ilmPolicyManager = IlmPolicyManager.create({ client });
|
||||
if (await ilmPolicyManager.doesIlmPolicyExist()) {
|
||||
this.logger.debug(`Found ILM policy ${ILM_POLICY_NAME}; skipping creation.`);
|
||||
} else {
|
||||
this.logger.info(`Creating ILM policy for reporting data stream: ${ILM_POLICY_NAME}`);
|
||||
await ilmPolicyManager.createIlmPolicy();
|
||||
}
|
||||
|
||||
const indexSettings = this.config.statefulSettings.enabled
|
||||
? {
|
||||
settings: {
|
||||
number_of_shards: 1,
|
||||
auto_expand_replicas: '0-1',
|
||||
lifecycle: {
|
||||
name: ILM_POLICY_NAME,
|
||||
},
|
||||
},
|
||||
}
|
||||
: {};
|
||||
|
||||
try {
|
||||
await client.indices.create({
|
||||
index: indexName,
|
||||
body: {
|
||||
...indexSettings,
|
||||
mappings: {
|
||||
properties: mapping,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
const isIndexExistsError = error.message.match(/resource_already_exists_exception/);
|
||||
if (isIndexExistsError) {
|
||||
// Do not fail a job if the job runner hits the race condition.
|
||||
this.logger.warn(`Automatic index creation failed: index already exists: ${error}`);
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.error(error);
|
||||
|
||||
throw error;
|
||||
}
|
||||
this.logger.info(
|
||||
`Linking ILM policy to reporting data stream: ${REPORTING_DATA_STREAM_ALIAS}, component template: ${REPORTING_DATA_STREAM_COMPONENT_TEMPLATE}`
|
||||
);
|
||||
await ilmPolicyManager.linkIlmPolicy();
|
||||
}
|
||||
|
||||
private async indexReport(report: Report): Promise<IndexResponse> {
|
||||
const doc = {
|
||||
index: report._index!,
|
||||
index: REPORTING_DATA_STREAM_ALIAS,
|
||||
id: report._id,
|
||||
refresh: 'wait_for' as estypes.Refresh,
|
||||
op_type: 'create' as const,
|
||||
body: {
|
||||
...report.toReportSource(),
|
||||
...sourceDoc({
|
||||
|
@ -193,52 +154,23 @@ export class ReportingStore {
|
|||
return await client.index(doc);
|
||||
}
|
||||
|
||||
/*
|
||||
* Called from addReport, which handles any errors
|
||||
*/
|
||||
private async refreshIndex(index: string) {
|
||||
const client = await this.getClient();
|
||||
|
||||
return client.indices.refresh({ index });
|
||||
}
|
||||
|
||||
/**
|
||||
* Function to be called during plugin start phase. This ensures the environment is correctly
|
||||
* configured for storage of reports.
|
||||
*/
|
||||
public async start() {
|
||||
if (!this.config.statefulSettings.enabled) {
|
||||
return;
|
||||
}
|
||||
const ilmPolicyManager = await this.getIlmPolicyManager();
|
||||
try {
|
||||
if (await ilmPolicyManager.doesIlmPolicyExist()) {
|
||||
this.logger.debug(`Found ILM policy ${ILM_POLICY_NAME}; skipping creation.`);
|
||||
return;
|
||||
}
|
||||
this.logger.info(`Creating ILM policy for managing reporting indices: ${ILM_POLICY_NAME}`);
|
||||
await ilmPolicyManager.createIlmPolicy();
|
||||
await this.createIlmPolicy();
|
||||
} catch (e) {
|
||||
this.logger.error('Error in start phase');
|
||||
this.logger.error(e.body?.error);
|
||||
this.logger.error(e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async addReport(report: Report): Promise<SavedReport> {
|
||||
let index = report._index;
|
||||
if (!index) {
|
||||
const timestamp = indexTimestamp(this.indexInterval);
|
||||
index = `${this.indexPrefix}-${timestamp}`;
|
||||
report._index = index;
|
||||
}
|
||||
await this.createIndex(index);
|
||||
|
||||
try {
|
||||
report.updateWithEsDoc(await this.indexReport(report));
|
||||
|
||||
await this.refreshIndex(index);
|
||||
|
||||
return report as SavedReport;
|
||||
} catch (err) {
|
||||
this.reportingCore.getEventLogger(report).logError(err);
|
||||
|
@ -402,8 +334,4 @@ export class ReportingStore {
|
|||
|
||||
return body;
|
||||
}
|
||||
|
||||
public getReportingIndexPattern(): string {
|
||||
return `${this.indexPrefix}-*`;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ import type { ElasticsearchClient } from '@kbn/core/server';
|
|||
import { i18n } from '@kbn/i18n';
|
||||
import { JOB_STATUS } from '@kbn/reporting-common';
|
||||
import type { ReportApiJSON, ReportSource } from '@kbn/reporting-common/types';
|
||||
import { REPORTING_SYSTEM_INDEX } from '@kbn/reporting-server';
|
||||
import { REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY } from '@kbn/reporting-server';
|
||||
import type { ReportingCore } from '../../..';
|
||||
import { Report } from '../../../lib/store';
|
||||
import { runtimeFieldKeys, runtimeFields } from '../../../lib/store/runtime_fields';
|
||||
|
@ -54,10 +54,6 @@ export interface JobsQueryFactory {
|
|||
}
|
||||
|
||||
export function jobsQueryFactory(reportingCore: ReportingCore): JobsQueryFactory {
|
||||
function getIndex() {
|
||||
return `${REPORTING_SYSTEM_INDEX}-*`;
|
||||
}
|
||||
|
||||
async function execQuery<
|
||||
T extends (client: ElasticsearchClient) => Promise<Awaited<ReturnType<T>> | undefined>
|
||||
>(callback: T): Promise<Awaited<ReturnType<T>> | undefined> {
|
||||
|
@ -96,7 +92,7 @@ export function jobsQueryFactory(reportingCore: ReportingCore): JobsQueryFactory
|
|||
});
|
||||
|
||||
const response = (await execQuery((elasticsearchClient) =>
|
||||
elasticsearchClient.search({ body, index: getIndex() })
|
||||
elasticsearchClient.search({ body, index: REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY })
|
||||
)) as estypes.SearchResponse<ReportSource>;
|
||||
|
||||
return (
|
||||
|
@ -127,7 +123,7 @@ export function jobsQueryFactory(reportingCore: ReportingCore): JobsQueryFactory
|
|||
};
|
||||
|
||||
const response = await execQuery((elasticsearchClient) =>
|
||||
elasticsearchClient.count({ body, index: getIndex() })
|
||||
elasticsearchClient.count({ body, index: REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY })
|
||||
);
|
||||
|
||||
return response?.count ?? 0;
|
||||
|
@ -156,7 +152,10 @@ export function jobsQueryFactory(reportingCore: ReportingCore): JobsQueryFactory
|
|||
});
|
||||
|
||||
const response = await execQuery((elasticsearchClient) =>
|
||||
elasticsearchClient.search<ReportSource>({ body, index: getIndex() })
|
||||
elasticsearchClient.search<ReportSource>({
|
||||
body,
|
||||
index: REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY,
|
||||
})
|
||||
);
|
||||
|
||||
const result = response?.hits?.hits?.[0];
|
||||
|
@ -187,7 +186,10 @@ export function jobsQueryFactory(reportingCore: ReportingCore): JobsQueryFactory
|
|||
};
|
||||
|
||||
const response = await execQuery((elasticsearchClient) =>
|
||||
elasticsearchClient.search<ReportSource>({ body, index: getIndex() })
|
||||
elasticsearchClient.search<ReportSource>({
|
||||
body,
|
||||
index: REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY,
|
||||
})
|
||||
);
|
||||
const hits = response?.hits?.hits?.[0];
|
||||
const status = hits?._source?.status;
|
||||
|
|
|
@ -6,11 +6,11 @@
|
|||
*/
|
||||
import { errors } from '@elastic/elasticsearch';
|
||||
import type { Logger, RequestHandler } from '@kbn/core/server';
|
||||
import { ILM_POLICY_NAME, INTERNAL_ROUTES } from '@kbn/reporting-common';
|
||||
import type { IlmPolicyStatusResponse } from '@kbn/reporting-common/url';
|
||||
import { INTERNAL_ROUTES } from '@kbn/reporting-common';
|
||||
import type { IlmPolicyStatusResponse } from '@kbn/reporting-common/types';
|
||||
import { REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY } from '@kbn/reporting-server';
|
||||
import type { ReportingCore } from '../../../core';
|
||||
import { IlmPolicyManager } from '../../../lib';
|
||||
import { deprecations } from '../../../lib/deprecations';
|
||||
import { getCounters } from '../../common';
|
||||
|
||||
const getAuthzWrapper =
|
||||
|
@ -24,15 +24,13 @@ const getAuthzWrapper =
|
|||
|
||||
const { elasticsearch } = await ctx.core;
|
||||
|
||||
const store = await reporting.getStore();
|
||||
|
||||
try {
|
||||
const body = await elasticsearch.client.asCurrentUser.security.hasPrivileges({
|
||||
body: {
|
||||
index: [
|
||||
{
|
||||
privileges: ['manage'], // required to do anything with the reporting indices
|
||||
names: [store.getReportingIndexPattern()],
|
||||
names: [REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY],
|
||||
allow_restricted_indices: true,
|
||||
},
|
||||
],
|
||||
|
@ -65,15 +63,11 @@ export const registerDeprecationsRoutes = (reporting: ReportingCore, logger: Log
|
|||
authzWrapper(async ({ core }, req, res) => {
|
||||
const counters = getCounters(req.route.method, getStatusPath, reporting.getUsageCounter());
|
||||
|
||||
const {
|
||||
elasticsearch: { client: scopedClient },
|
||||
} = await core;
|
||||
const checkIlmMigrationStatus = () => {
|
||||
return deprecations.checkIlmMigrationStatus({
|
||||
reportingCore: reporting,
|
||||
// We want to make the current status visible to all reporting users
|
||||
elasticsearchClient: scopedClient.asInternalUser,
|
||||
});
|
||||
const checkIlmMigrationStatus = async () => {
|
||||
const { client: scopedClient } = (await core).elasticsearch;
|
||||
|
||||
const ilmPolicyManager = IlmPolicyManager.create({ client: scopedClient.asInternalUser });
|
||||
return ilmPolicyManager.checkIlmMigrationStatus();
|
||||
};
|
||||
|
||||
try {
|
||||
|
@ -106,17 +100,15 @@ export const registerDeprecationsRoutes = (reporting: ReportingCore, logger: Log
|
|||
authzWrapper(async ({ core }, req, res) => {
|
||||
const counters = getCounters(req.route.method, migrateApiPath, reporting.getUsageCounter());
|
||||
|
||||
const store = await reporting.getStore();
|
||||
const {
|
||||
client: { asCurrentUser: client },
|
||||
} = (await core).elasticsearch;
|
||||
|
||||
const scopedIlmPolicyManager = IlmPolicyManager.create({
|
||||
client,
|
||||
});
|
||||
|
||||
// First we ensure that the reporting ILM policy exists in the cluster
|
||||
try {
|
||||
const {
|
||||
client: { asCurrentUser },
|
||||
} = (await core).elasticsearch;
|
||||
const scopedIlmPolicyManager = IlmPolicyManager.create({
|
||||
client: asCurrentUser,
|
||||
});
|
||||
|
||||
// We don't want to overwrite an existing reporting policy because it may contain alterations made by users
|
||||
if (!(await scopedIlmPolicyManager.doesIlmPolicyExist())) {
|
||||
await scopedIlmPolicyManager.createIlmPolicy();
|
||||
|
@ -125,24 +117,19 @@ export const registerDeprecationsRoutes = (reporting: ReportingCore, logger: Log
|
|||
return res.customError({ statusCode: e?.statusCode ?? 500, body: { message: e.message } });
|
||||
}
|
||||
|
||||
const indexPattern = store.getReportingIndexPattern();
|
||||
|
||||
// Second we migrate all of the existing indices to be managed by the reporting ILM policy
|
||||
try {
|
||||
await client.indices.putSettings({
|
||||
index: indexPattern,
|
||||
body: {
|
||||
index: {
|
||||
lifecycle: {
|
||||
name: ILM_POLICY_NAME,
|
||||
},
|
||||
},
|
||||
},
|
||||
const {
|
||||
client: { asInternalUser },
|
||||
} = (await core).elasticsearch;
|
||||
const unscopedIlmPolicyManager = IlmPolicyManager.create({
|
||||
client: asInternalUser,
|
||||
});
|
||||
const response = await unscopedIlmPolicyManager.migrateIndicesToIlmPolicy();
|
||||
|
||||
counters.usageCounter();
|
||||
|
||||
return res.ok();
|
||||
return res.ok({ body: response });
|
||||
} catch (err) {
|
||||
logger.error(err);
|
||||
|
||||
|
|
|
@@ -36,14 +36,14 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
    await browser.refresh();
  };

-  const getReport = async () => {
+  const getReport = async ({ timeout } = { timeout: 60 * 1000 }) => {
    // close any open notification toasts
    await toasts.dismissAll();

    await PageObjects.reporting.openExportTab();
    await PageObjects.reporting.clickGenerateReportButton();

-    const url = await PageObjects.reporting.getReportURL(60000);
+    const url = await PageObjects.reporting.getReportURL(timeout);
    const res = await PageObjects.reporting.getResponse(url ?? '');

    expect(res.status).to.equal(200);

@@ -173,7 +173,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
      await PageObjects.discover.saveSearch('large export');

      // match file length, the beginning and the end of the csv file contents
-      const { text: csvFile } = await getReport();
+      const { text: csvFile } = await getReport({ timeout: 80 * 1000 });
      expect(csvFile.length).to.be(4826973);
      expectSnapshot(csvFile.slice(0, 5000)).toMatch();
      expectSnapshot(csvFile.slice(-5000)).toMatch();
@ -0,0 +1,67 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { expect } from 'expect';
|
||||
import { FtrProviderContext } from '../ftr_provider_context';
|
||||
|
||||
// eslint-disable-next-line import/no-default-export
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
const reportingAPI = getService('reportingAPI');
|
||||
const supertest = getService('supertest');
|
||||
|
||||
describe('Data Stream', () => {
|
||||
before(async () => {
|
||||
await reportingAPI.initEcommerce();
|
||||
|
||||
// for this test, we don't need to wait for the job to finish or verify the result
|
||||
await reportingAPI.postJob(
|
||||
`/api/reporting/generate/csv_searchsource?jobParams=%28browserTimezone%3AUTC%2Ccolumns%3A%21%28%29%2CobjectType%3Asearch%2CsearchSource%3A%28fields%3A%21%28%28field%3A%27%2A%27%2Cinclude_unmapped%3Atrue%29%29%2Cfilter%3A%21%28%28meta%3A%28field%3A%27%40timestamp%27%2Cindex%3A%27logstash-%2A%27%2Cparams%3A%28%29%29%2Cquery%3A%28range%3A%28%27%40timestamp%27%3A%28format%3Astrict_date_optional_time%2Cgte%3A%272015-09-22T09%3A17%3A53.728Z%27%2Clte%3A%272015-09-22T09%3A30%3A50.786Z%27%29%29%29%29%2C%28%27%24state%27%3A%28store%3AappState%29%2Cmeta%3A%28alias%3A%21n%2Cdisabled%3A%21f%2Cindex%3A%27logstash-%2A%27%2Ckey%3Aquery%2Cnegate%3A%21f%2Ctype%3Acustom%2Cvalue%3A%27%7B%22bool%22%3A%7B%22minimum_should_match%22%3A1%2C%22should%22%3A%5B%7B%22match_phrase%22%3A%7B%22%40tags%22%3A%22info%22%7D%7D%5D%7D%7D%27%29%2Cquery%3A%28bool%3A%28minimum_should_match%3A1%2Cshould%3A%21%28%28match_phrase%3A%28%27%40tags%27%3Ainfo%29%29%29%29%29%29%29%2Cindex%3A%27logstash-%2A%27%2Cquery%3A%28language%3Akuery%2Cquery%3A%27%27%29%2Csort%3A%21%28%28%27%40timestamp%27%3A%28format%3Astrict_date_optional_time%2Corder%3Adesc%29%29%29%29%2Ctitle%3A%27A%20saved%20search%20with%20match_phrase%20filter%20and%20no%20columns%20selected%27%2Cversion%3A%278.15.0%27%29`
|
||||
);
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await reportingAPI.deleteAllReports();
|
||||
await reportingAPI.teardownEcommerce();
|
||||
});
|
||||
|
||||
it('uses the datastream configuration without set policy', async () => {
|
||||
const { body } = await supertest
|
||||
.get(`/api/index_management/data_streams/.kibana-reporting`)
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.set('x-elastic-internal-origin', 'xxx')
|
||||
.expect(200);
|
||||
|
||||
expect(body).toEqual({
|
||||
_meta: {
|
||||
description: 'default kibana reporting template installed by elasticsearch',
|
||||
managed: true,
|
||||
},
|
||||
name: '.kibana-reporting',
|
||||
indexTemplateName: '.kibana-reporting',
|
||||
timeStampField: { name: '@timestamp' },
|
||||
indices: [
|
||||
{
|
||||
name: expect.any(String),
|
||||
uuid: expect.any(String),
|
||||
managedBy: 'Index Lifecycle Management',
|
||||
preferILM: true,
|
||||
},
|
||||
],
|
||||
generation: 1,
|
||||
health: 'green',
|
||||
ilmPolicyName: 'kibana-reporting',
|
||||
maxTimeStamp: 0,
|
||||
privileges: { delete_index: true, manage_data_stream_lifecycle: true },
|
||||
hidden: true,
|
||||
lifecycle: { enabled: true },
|
||||
nextGenerationManagedBy: 'Index Lifecycle Management',
|
||||
storageSize: expect.any(String),
|
||||
storageSizeBytes: expect.any(Number),
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@@ -42,7 +42,8 @@ export default function ({ getService }: FtrProviderContext) {
      cluster: ['manage_ilm'],
      indices: [
        { names: ['ecommerce'], privileges: ['read'], allow_restricted_indices: false },
-        { names: ['.reporting-*'], privileges: ['all'], allow_restricted_indices: true },
+        { names: ['.reporting-*'], privileges: ['all'], allow_restricted_indices: true }, // plain indices (from old version)
+        { names: ['.kibana-reporting'], privileges: ['all'], allow_restricted_indices: true }, // data stream
      ],
      run_as: [],
    },
|
@ -22,6 +22,7 @@ export default function ({ getService, loadTestFile }: FtrProviderContext) {
|
|||
loadTestFile(require.resolve('./bwc_existing_indexes'));
|
||||
loadTestFile(require.resolve('./security_roles_privileges'));
|
||||
loadTestFile(require.resolve('./generate_csv_discover'));
|
||||
loadTestFile(require.resolve('./datastream'));
|
||||
loadTestFile(require.resolve('./csv_v2'));
|
||||
loadTestFile(require.resolve('./csv_v2_esql'));
|
||||
loadTestFile(require.resolve('./network_policy'));
|
||||
|
|
|
@ -5,12 +5,16 @@
|
|||
* 2.0.
|
||||
*/
|
||||
|
||||
import type { LoadActionPerfOptions } from '@kbn/es-archiver';
|
||||
import { INTERNAL_ROUTES } from '@kbn/reporting-common';
|
||||
import type { JobParamsCSV } from '@kbn/reporting-export-types-csv-common';
|
||||
import type { JobParamsPDFDeprecated } from '@kbn/reporting-export-types-pdf-common';
|
||||
import type { JobParamsPNGV2 } from '@kbn/reporting-export-types-png-common';
|
||||
import type { JobParamsCSV } from '@kbn/reporting-export-types-csv-common';
|
||||
import {
|
||||
REPORTING_DATA_STREAM_WILDCARD,
|
||||
REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY,
|
||||
} from '@kbn/reporting-server';
|
||||
import rison from '@kbn/rison';
|
||||
import { LoadActionPerfOptions } from '@kbn/es-archiver';
|
||||
import { INTERNAL_ROUTES } from '@kbn/reporting-common';
|
||||
import { FtrProviderContext } from '../ftr_provider_context';
|
||||
|
||||
function removeWhitespace(str: string) {
|
||||
|
@ -64,7 +68,6 @@ export function createScenarios({ getService }: Pick<FtrProviderContext, 'getSer
|
|||
const teardownEcommerce = async () => {
|
||||
await esArchiver.unload('x-pack/test/functional/es_archives/reporting/ecommerce');
|
||||
await kibanaServer.importExport.unload(ecommerceSOPath);
|
||||
await deleteAllReports();
|
||||
};
|
||||
|
||||
const initLogs = async () => {
|
||||
|
@ -211,7 +214,7 @@ export function createScenarios({ getService }: Pick<FtrProviderContext, 'getSer
|
|||
// ignores 409 errs and keeps retrying
|
||||
await retry.tryForTime(5000, async () => {
|
||||
await esSupertest
|
||||
.post('/.reporting*/_delete_by_query')
|
||||
.post(`/${REPORTING_DATA_STREAM_WILDCARD_WITH_LEGACY}/_delete_by_query`)
|
||||
.send({ query: { match_all: {} } })
|
||||
.expect(200);
|
||||
});
|
||||
|
@ -248,7 +251,7 @@ export function createScenarios({ getService }: Pick<FtrProviderContext, 'getSer
|
|||
'index.lifecycle.name': null,
|
||||
};
|
||||
await esSupertest
|
||||
.put('/.reporting*/_settings')
|
||||
.put(`/${REPORTING_DATA_STREAM_WILDCARD}/_settings`)
|
||||
.send({
|
||||
settings,
|
||||
})
|
||||
|
|
|
@ -4,11 +4,10 @@
|
|||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import expect from '@kbn/expect';
|
||||
import { INTERNAL_ROUTES, PUBLIC_ROUTES } from '@kbn/reporting-common';
|
||||
import { indexTimestamp } from '@kbn/reporting-plugin/server/lib/store/index_timestamp';
|
||||
import { Response } from 'supertest';
|
||||
import { indexTimestamp } from './index_timestamp';
|
||||
import { FtrProviderContext } from '../ftr_provider_context';
|
||||
|
||||
export function createUsageServices({ getService }: FtrProviderContext) {
|
||||
|
|
|
@@ -175,5 +175,6 @@
    "@kbn/esql-utils",
    "@kbn/search-types",
    "@kbn/analytics-ftr-helpers-plugin",
+    "@kbn/reporting-server",
  ]
}
@ -0,0 +1,84 @@
|
|||
/*
|
||||
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||
* or more contributor license agreements. Licensed under the Elastic License
|
||||
* 2.0; you may not use this file except in compliance with the Elastic License
|
||||
* 2.0.
|
||||
*/
|
||||
|
||||
import { expect } from 'expect';
|
||||
import { FtrProviderContext } from '../../../ftr_provider_context';
|
||||
|
||||
export default function ({ getService }: FtrProviderContext) {
|
||||
const esArchiver = getService('esArchiver');
|
||||
const kibanaServer = getService('kibanaServer');
|
||||
const reportingAPI = getService('svlReportingApi');
|
||||
const supertest = getService('supertest');
|
||||
|
||||
const archives: Record<string, { data: string; savedObjects: string }> = {
|
||||
ecommerce: {
|
||||
data: 'x-pack/test/functional/es_archives/reporting/ecommerce',
|
||||
savedObjects: 'x-pack/test/functional/fixtures/kbn_archiver/reporting/ecommerce',
|
||||
},
|
||||
};
|
||||
|
||||
describe('Data Stream', () => {
|
||||
before(async () => {
|
||||
await esArchiver.load(archives.ecommerce.data);
|
||||
await kibanaServer.importExport.load(archives.ecommerce.savedObjects);
|
||||
|
||||
// for this test, we don't need to wait for the job to finish or verify the result
|
||||
await reportingAPI.createReportJobInternal('csv_searchsource', {
|
||||
browserTimezone: 'UTC',
|
||||
objectType: 'search',
|
||||
searchSource: {
|
||||
index: '5193f870-d861-11e9-a311-0fa548c5f953',
|
||||
query: { language: 'kuery', query: '' },
|
||||
version: true,
|
||||
},
|
||||
title: 'Ecommerce Data',
|
||||
version: '8.15.0',
|
||||
});
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await reportingAPI.deleteAllReports();
|
||||
await esArchiver.unload(archives.ecommerce.data);
|
||||
await kibanaServer.importExport.unload(archives.ecommerce.savedObjects);
|
||||
});
|
||||
|
||||
it('uses the datastream configuration with set ILM policy', async () => {
|
||||
const { body } = await supertest
|
||||
.get(`/api/index_management/data_streams/.kibana-reporting`)
|
||||
.set('kbn-xsrf', 'xxx')
|
||||
.set('x-elastic-internal-origin', 'xxx')
|
||||
.expect(200);
|
||||
|
||||
expect(body).toEqual({
|
||||
_meta: {
|
||||
description: 'default kibana reporting template installed by elasticsearch',
|
||||
managed: true,
|
||||
},
|
||||
name: '.kibana-reporting',
|
||||
indexTemplateName: '.kibana-reporting',
|
||||
generation: 1,
|
||||
health: 'green',
|
||||
hidden: true,
|
||||
indices: [
|
||||
{
|
||||
name: expect.any(String),
|
||||
uuid: expect.any(String),
|
||||
managedBy: 'Data stream lifecycle',
|
||||
preferILM: true,
|
||||
},
|
||||
],
|
||||
lifecycle: { enabled: true },
|
||||
maxTimeStamp: 0,
|
||||
nextGenerationManagedBy: 'Data stream lifecycle',
|
||||
privileges: { delete_index: true, manage_data_stream_lifecycle: true },
|
||||
timeStampField: { name: '@timestamp' },
|
||||
storageSize: expect.any(String),
|
||||
storageSizeBytes: expect.any(Number),
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
|
@@ -730,7 +730,9 @@ export default function ({ getService }: FtrProviderContext) {
        },
      })
    );
-    await reportingAPI.waitForJobToFinish(res.path);
+    await reportingAPI.waitForJobToFinish(res.path, undefined, undefined, {
+      timeout: 80 * 1000,
+    });
    const csvFile = await reportingAPI.getCompletedJobOutput(res.path);
    expect((csvFile as string).length).to.be(4826973);
    expectSnapshot(createPartialCsv(csvFile)).toMatch();
@@ -13,5 +13,6 @@ export default ({ loadTestFile }: FtrProviderContext) => {

    loadTestFile(require.resolve('./management'));
    loadTestFile(require.resolve('./generate_csv_discover'));
+    loadTestFile(require.resolve('./datastream'));
  });
};
@@ -35,14 +35,14 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
    await browser.refresh();
  };

-  const getReport = async () => {
+  const getReport = async ({ timeout } = { timeout: 60 * 1000 }) => {
    // close any open notification toasts
    await toasts.dismissAll();

    await PageObjects.reporting.openExportTab();
    await PageObjects.reporting.clickGenerateReportButton();

-    const url = await PageObjects.reporting.getReportURL(60000);
+    const url = await PageObjects.reporting.getReportURL(timeout);
    // TODO: Fetch CSV client side in Serverless since `PageObjects.reporting.getResponse()`
    // doesn't work because it relies on `SecurityService.testUserSupertest`
    const res: { status: number; contentType: string | null; text: string } =

@@ -184,7 +184,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
      await PageObjects.discover.saveSearch('large export');

      // match file length, the beginning and the end of the csv file contents
-      const { text: csvFile } = await getReport();
+      const { text: csvFile } = await getReport({ timeout: 80 * 1000 });
      expect(csvFile.length).to.be(4826973);
      expectSnapshot(csvFile.slice(0, 5000)).toMatch();
      expectSnapshot(csvFile.slice(-5000)).toMatch();