[ES|QL] CSV Reporting in Discover (#174511)
## Summary

Closes https://github.com/elastic/kibana/issues/173390

This PR enables CSV report generation with ES|QL in Discover. Before this PR, there were two report types for generating CSV reports from Discover:

- https://github.com/elastic/kibana/blob/main/packages/kbn-reporting/export_types/csv/csv_searchsource.ts - the old, deprecated report type that relies on `SerializedSearchSourceFields`. It is still used by the Discover UI, and we plan to migrate away from it in https://github.com/elastic/kibana/issues/151190.
- https://github.com/elastic/kibana/blob/main/packages/kbn-reporting/export_types/csv/csv_v2.ts - the new report type that relies on the Discover locator. It can currently only generate reports from searches backed by a saved search, and it was implemented as a public-facing API for simple report generation outside Kibana.

Since we plan to migrate to the v2 report type and search source is not needed for ES|QL, this PR implements ES|QL CSV reporting on top of the v2 report type. Initially I wanted to create a new report type similar to v2 just for ES|QL, but it turned out to require a lot more boilerplate code without a significant benefit (see https://github.com/elastic/kibana/pull/174448), so I changed my mind and this PR adds ES|QL capabilities to the existing csv_v2 report type instead. This is convenient because the input is the same (a Discover locator), the output is the same (a CSV file plus meta information), and the telemetry is also the same.

As of this PR, the ES|QL report:

- Uses the ES|QL query from the locator.
- Adds a time range filter if one is present in the locator. The time field is picked from the data view (which is available in the locator but otherwise unused for ES|QL). Other filters are also passed through if present in the locator.
- Takes the locator's "columns" into account.
- Like current non-ES|QL reports from the Discover UI, it doesn't use saved searches and relies only on state from the URL. This will likely be improved in https://github.com/elastic/kibana/issues/151190 to support both.
- Uses the existing CSV settings for functionality like formula checking, escaping, BOM, max size, etc.
- Keeps regular CSV features like cancellation and yielding to the event loop (even though these are not strictly needed yet, since the number of ES|QL results is limited).

Some notable differences compared to regular Discover search / CSV reports:

- A lot simpler, since it doesn't use search source or field formats.
- No pagination and less CPU-heavy, since ES|QL responses are limited to 10,000 results and a single request.
This commit is contained in:
parent d5b837ddd7 · commit f38f87b579

25 changed files with 1189 additions and 41 deletions
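For context on how the new path is exercised end to end: a client kicks off an ES|QL CSV export by POSTing rison-encoded job params that carry a Discover locator with an `esql` query. A minimal sketch, based on the API integration test added in this PR (the index name in the query is hypothetical):

```ts
import rison from '@kbn/rison';
import { DISCOVER_APP_LOCATOR } from '@kbn/discover-plugin/common';

// Job params for the csv_v2 report type: the locator carries the ES|QL query.
const job = {
  browserTimezone: 'UTC',
  objectType: 'search',
  version: '8.13.0',
  title: 'CSV Report',
  locatorParams: [
    {
      id: DISCOVER_APP_LOCATOR,
      version: 'reporting',
      params: {
        query: { esql: 'from my-index | limit 10' }, // hypothetical query
      },
    },
  ],
};

// Send as POST /api/reporting/generate/csv_v2 with body { jobParams: rison.encode(job) }
const jobParams = rison.encode(job);
```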
```diff
@@ -22,4 +22,5 @@ export type {
   ESQLColumn,
+  ESQLRow,
   ESQLSearchReponse,
   ESQLSearchParams,
 } from './src';
```

```diff
@@ -15,6 +15,7 @@ import {
   ESQLColumn,
+  ESQLRow,
   ESQLSearchReponse,
   ESQLSearchParams,
 } from './search';
 
 export type ESFilter = estypes.QueryDslQueryContainer;
```

```diff
@@ -47,4 +48,5 @@ export type {
   ESQLColumn,
+  ESQLRow,
   ESQLSearchReponse,
   ESQLSearchParams,
 };
```
```diff
@@ -665,3 +665,13 @@ export interface ESQLSearchReponse {
   columns: ESQLColumn[];
   values: ESQLRow[];
 }
+
+export interface ESQLSearchParams {
+  // TODO: time_zone support was temporarily removed from ES|QL,
+  // we will need to add it back in once it is supported again.
+  // https://github.com/elastic/elasticsearch/pull/102767
+  // time_zone?: string;
+  query: string;
+  filter?: unknown;
+  locale?: string;
+}
```
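For context, an ES|QL search response is columnar (`columns` plus `values`) rather than a list of hits, and the new generator turns it into row objects before writing CSV. A minimal sketch of that mapping using lodash's `zipObject`, as the generator does (the sample data here is illustrative):

```ts
import { zipObject } from 'lodash';

// Shape returned by the esql search strategy (see ESQLSearchReponse above).
const rawResponse = {
  columns: [
    { name: 'date', type: 'date' },
    { name: 'message', type: 'string' },
  ],
  values: [['2020-12-31T00:14:28.000Z', 'This is a great message!']],
};

const columnNames = rawResponse.columns.map(({ name }) => name);
// zipObject pairs column names with cell values, one object per row:
// [{ date: '2020-12-31T00:14:28.000Z', message: 'This is a great message!' }]
const rows = rawResponse.values.map((row) => zipObject(columnNames, row));
```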
```diff
@@ -7,3 +7,4 @@
  */
 
 export { CsvGenerator } from './src/generate_csv';
+export { CsvESQLGenerator, type JobParamsCsvESQL } from './src/generate_csv_esql';
```
packages/kbn-generate-csv/src/generate_csv_esql.test.ts (new file, 479 lines)
```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import * as Rx from 'rxjs';
import type { Writable } from 'stream';

import { errors as esErrors } from '@elastic/elasticsearch';
import type { IScopedClusterClient, IUiSettingsClient, Logger } from '@kbn/core/server';
import {
  elasticsearchServiceMock,
  loggingSystemMock,
  savedObjectsClientMock,
  uiSettingsServiceMock,
} from '@kbn/core/server/mocks';
import { IKibanaSearchResponse } from '@kbn/data-plugin/common';
import { IScopedSearchClient } from '@kbn/data-plugin/server';
import { dataPluginMock } from '@kbn/data-plugin/server/mocks';
import { CancellationToken } from '@kbn/reporting-common';
import type { ReportingConfigType } from '@kbn/reporting-server';
import {
  UI_SETTINGS_CSV_QUOTE_VALUES,
  UI_SETTINGS_CSV_SEPARATOR,
  UI_SETTINGS_DATEFORMAT_TZ,
} from './constants';
import { CsvESQLGenerator, JobParamsCsvESQL } from './generate_csv_esql';
import type { ESQLSearchReponse } from '@kbn/es-types';

const createMockJob = (
  params: Partial<JobParamsCsvESQL> = { query: { esql: '' } }
): JobParamsCsvESQL => ({
  ...params,
  query: { esql: '' },
});

describe('CsvESQLGenerator', () => {
  let mockEsClient: IScopedClusterClient;
  let mockDataClient: IScopedSearchClient;
  let mockConfig: ReportingConfigType['csv'];
  let mockLogger: jest.Mocked<Logger>;
  let uiSettingsClient: IUiSettingsClient;
  let stream: jest.Mocked<Writable>;
  let content: string;

  const getMockRawResponse = (
    esqlResponse: ESQLSearchReponse = {
      columns: [],
      values: [],
    }
  ): ESQLSearchReponse => esqlResponse;

  const mockDataClientSearchDefault = jest.fn().mockImplementation(
    (): Rx.Observable<IKibanaSearchResponse<ESQLSearchReponse>> =>
      Rx.of({
        rawResponse: getMockRawResponse(),
      })
  );

  const mockSearchResponse = (response: ESQLSearchReponse) => {
    mockDataClient.search = jest.fn().mockImplementation(() =>
      Rx.of({
        rawResponse: getMockRawResponse(response),
      })
    );
  };

  beforeEach(async () => {
    content = '';
    stream = { write: jest.fn((chunk) => (content += chunk)) } as unknown as typeof stream;
    mockEsClient = elasticsearchServiceMock.createScopedClusterClient();
    mockDataClient = dataPluginMock.createStartContract().search.asScoped({} as any);
    mockDataClient.search = mockDataClientSearchDefault;
    uiSettingsClient = uiSettingsServiceMock
      .createStartContract()
      .asScopedToClient(savedObjectsClientMock.create());
    uiSettingsClient.get = jest.fn().mockImplementation((key): any => {
      switch (key) {
        case UI_SETTINGS_CSV_QUOTE_VALUES:
          return true;
        case UI_SETTINGS_CSV_SEPARATOR:
          return ',';
        case UI_SETTINGS_DATEFORMAT_TZ:
          return 'Browser';
      }
    });

    mockConfig = {
      checkForFormulas: true,
      escapeFormulaValues: true,
      maxSizeBytes: 180000,
      useByteOrderMarkEncoding: false,
      scroll: { size: 500, duration: '30s' },
      enablePanelActionDownload: true,
      maxConcurrentShardRequests: 5,
    };

    mockLogger = loggingSystemMock.createLogger();
  });

  it('formats an empty search result to CSV content', async () => {
    const generateCsv = new CsvESQLGenerator(
      createMockJob({ columns: ['date', 'ip', 'message'] }),
      mockConfig,
      {
        es: mockEsClient,
        data: mockDataClient,
        uiSettings: uiSettingsClient,
      },
      new CancellationToken(),
      mockLogger,
      stream
    );
    const csvResult = await generateCsv.generateData();
    expect(content).toMatchInlineSnapshot(`
      "
      "
    `);
    expect(csvResult.csv_contains_formulas).toBe(false);
  });

  it('formats a search result to CSV content', async () => {
    mockSearchResponse({
      columns: [
        { name: 'date', type: 'date' },
        { name: 'ip', type: 'ip' },
        { name: 'message', type: 'string' },
        { name: 'geo.coordinates', type: 'geo_point' },
      ],
      values: [['2020-12-31T00:14:28.000Z', '110.135.176.89', 'This is a great message!', null]],
    });

    const generateCsv = new CsvESQLGenerator(
      createMockJob(),
      mockConfig,
      {
        es: mockEsClient,
        data: mockDataClient,
        uiSettings: uiSettingsClient,
      },
      new CancellationToken(),
      mockLogger,
      stream
    );
    const csvResult = await generateCsv.generateData();
    expect(content).toMatchInlineSnapshot(`
      "date,ip,message,\\"geo.coordinates\\"
      \\"2020-12-31T00:14:28.000Z\\",\\"110.135.176.89\\",\\"This is a great message!\\",
      "
    `);
    expect(csvResult.csv_contains_formulas).toBe(false);
  });

  it('calculates the bytes of the content', async () => {
    mockSearchResponse({
      columns: [{ name: 'message', type: 'string' }],
      values: Array(100).fill(['This is a great message!']),
    });

    const generateCsv = new CsvESQLGenerator(
      createMockJob(),
      mockConfig,
      {
        es: mockEsClient,
        data: mockDataClient,
        uiSettings: uiSettingsClient,
      },
      new CancellationToken(),
      mockLogger,
      stream
    );
    const csvResult = await generateCsv.generateData();
    expect(csvResult.max_size_reached).toBe(false);
    expect(csvResult.warnings).toEqual([]);
  });

  it('warns if max size was reached', async () => {
    const TEST_MAX_SIZE = 50;
    mockConfig = {
      ...mockConfig,
      maxSizeBytes: TEST_MAX_SIZE,
    };

    mockSearchResponse({
      columns: [{ name: 'message', type: 'string' }],
      values: Array(100).fill(['This is a great message!']),
    });

    const generateCsv = new CsvESQLGenerator(
      createMockJob(),
      mockConfig,
      {
        es: mockEsClient,
        data: mockDataClient,
        uiSettings: uiSettingsClient,
      },
      new CancellationToken(),
      mockLogger,
      stream
    );
    const csvResult = await generateCsv.generateData();
    expect(csvResult.max_size_reached).toBe(true);
    expect(csvResult.warnings).toEqual([]);
    expect(content).toMatchInlineSnapshot(`
      "message
      \\"This is a great message!\\"
      "
    `);
  });

  describe('jobParams', () => {
    it('uses columns to select columns', async () => {
      mockSearchResponse({
        columns: [
          { name: 'date', type: 'date' },
          { name: 'ip', type: 'ip' },
          { name: 'message', type: 'string' },
        ],
        values: [['2020-12-31T00:14:28.000Z', '110.135.176.89', 'This is a great message!']],
      });

      const generateCsv = new CsvESQLGenerator(
        createMockJob({ columns: ['message', 'date', 'something else'] }),
        mockConfig,
        {
          es: mockEsClient,
          data: mockDataClient,
          uiSettings: uiSettingsClient,
        },
        new CancellationToken(),
        mockLogger,
        stream
      );
      await generateCsv.generateData();

      expect(content).toMatchInlineSnapshot(`
        "message,date
        \\"This is a great message!\\",\\"2020-12-31T00:14:28.000Z\\"
        "
      `);
    });

    it('passes filters to the query', async () => {
      const query = { esql: 'from kibana_sample_data_logs | limit 10' };
      const filters = [
        {
          meta: {},
          query: {
            range: {
              '@timestamp': { format: 'strict_date_optional_time', gte: 'now-15m', lte: 'now' },
            },
          },
        },
      ];

      const generateCsv = new CsvESQLGenerator(
        createMockJob({ query, filters }),
        mockConfig,
        {
          es: mockEsClient,
          data: mockDataClient,
          uiSettings: uiSettingsClient,
        },
        new CancellationToken(),
        mockLogger,
        stream
      );
      await generateCsv.generateData();

      expect(mockDataClient.search).toHaveBeenCalledWith(
        {
          params: {
            filter: {
              bool: {
                filter: [
                  {
                    range: {
                      '@timestamp': {
                        format: 'strict_date_optional_time',
                        gte: 'now-15m',
                        lte: 'now',
                      },
                    },
                  },
                ],
                must: [],
                must_not: [],
                should: [],
              },
            },
            locale: 'en',
            query: '',
          },
        },
        {
          strategy: 'esql',
          transport: {
            requestTimeout: '30s',
          },
          abortSignal: expect.any(AbortSignal),
        }
      );
    });
  });

  describe('formulas', () => {
    const TEST_FORMULA = '=SUM(A1:A2)';

    it(`escapes formula values in a cell, doesn't warn the csv contains formulas`, async () => {
      mockSearchResponse({
        columns: [{ name: 'message', type: 'string' }],
        values: [[TEST_FORMULA]],
      });

      const generateCsv = new CsvESQLGenerator(
        createMockJob(),
        mockConfig,
        {
          es: mockEsClient,
          data: mockDataClient,
          uiSettings: uiSettingsClient,
        },
        new CancellationToken(),
        mockLogger,
        stream
      );

      const csvResult = await generateCsv.generateData();

      expect(content).toMatchInlineSnapshot(`
        "message
        \\"'=SUM(A1:A2)\\"
        "
      `);
      expect(csvResult.csv_contains_formulas).toBe(false);
    });

    it(`escapes formula values in a header, doesn't warn the csv contains formulas`, async () => {
      mockSearchResponse({
        columns: [{ name: TEST_FORMULA, type: 'string' }],
        values: [['This is great data']],
      });

      const generateCsv = new CsvESQLGenerator(
        createMockJob(),
        mockConfig,
        {
          es: mockEsClient,
          data: mockDataClient,
          uiSettings: uiSettingsClient,
        },
        new CancellationToken(),
        mockLogger,
        stream
      );

      const csvResult = await generateCsv.generateData();

      expect(content).toMatchInlineSnapshot(`
        "\\"'=SUM(A1:A2)\\"
        \\"This is great data\\"
        "
      `);
      expect(csvResult.csv_contains_formulas).toBe(false);
    });

    it('can check for formulas, without escaping them', async () => {
      mockConfig = {
        checkForFormulas: true,
        escapeFormulaValues: false,
        maxSizeBytes: 180000,
        useByteOrderMarkEncoding: false,
        scroll: { size: 500, duration: '30s' },
        enablePanelActionDownload: true,
        maxConcurrentShardRequests: 5,
      };
      mockSearchResponse({
        columns: [{ name: 'message', type: 'string' }],
        values: [[TEST_FORMULA]],
      });

      const generateCsv = new CsvESQLGenerator(
        createMockJob(),
        mockConfig,
        {
          es: mockEsClient,
          data: mockDataClient,
          uiSettings: uiSettingsClient,
        },
        new CancellationToken(),
        mockLogger,
        stream
      );

      const csvResult = await generateCsv.generateData();

      expect(content).toMatchInlineSnapshot(`
        "message
        \\"=SUM(A1:A2)\\"
        "
      `);
      expect(csvResult.csv_contains_formulas).toBe(true);
    });
  });

  it('handles unknown errors', async () => {
    mockDataClient.search = jest.fn().mockImplementation(() => {
      throw new Error('An unknown error');
    });
    const generateCsv = new CsvESQLGenerator(
      createMockJob(),
      mockConfig,
      {
        es: mockEsClient,
        data: mockDataClient,
        uiSettings: uiSettingsClient,
      },
      new CancellationToken(),
      mockLogger,
      stream
    );
    await expect(generateCsv.generateData()).resolves.toMatchInlineSnapshot(`
      Object {
        "content_type": "text/csv",
        "csv_contains_formulas": false,
        "error_code": undefined,
        "max_size_reached": false,
        "metrics": Object {
          "csv": Object {
            "rows": 0,
          },
        },
        "warnings": Array [
          "Encountered an unknown error: An unknown error",
        ],
      }
    `);
  });

  describe('error codes', () => {
    it('returns the expected error code when authentication expires', async () => {
      mockDataClient.search = jest.fn().mockImplementation(() => {
        throw new esErrors.ResponseError({ statusCode: 403, meta: {} as any, warnings: [] });
      });

      const generateCsv = new CsvESQLGenerator(
        createMockJob(),
        mockConfig,
        {
          es: mockEsClient,
          data: mockDataClient,
          uiSettings: uiSettingsClient,
        },
        new CancellationToken(),
        mockLogger,
        stream
      );

      const { error_code: errorCode, warnings } = await generateCsv.generateData();
      expect(errorCode).toBe('authentication_expired_error');
      expect(warnings).toMatchInlineSnapshot(`
        Array [
          "This report contains partial CSV results because the authentication token expired. Export a smaller amount of data or increase the timeout of the authentication token.",
        ]
      `);

      expect(mockLogger.error.mock.calls).toMatchInlineSnapshot(`
        Array [
          Array [
            [ResponseError: Response Error],
          ],
        ]
      `);
    });
  });
});
```
packages/kbn-generate-csv/src/generate_csv_esql.ts (new file, 231 lines)
```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { lastValueFrom } from 'rxjs';
import type { Writable } from 'stream';

import { errors as esErrors } from '@elastic/elasticsearch';
import type { IScopedClusterClient, IUiSettingsClient, Logger } from '@kbn/core/server';
import type { ESQLSearchParams, ESQLSearchReponse } from '@kbn/es-types';
import {
  cellHasFormulas,
  ESQL_SEARCH_STRATEGY,
  getEsQueryConfig,
  IKibanaSearchRequest,
  IKibanaSearchResponse,
} from '@kbn/data-plugin/common';
import type { IScopedSearchClient } from '@kbn/data-plugin/server';
import {
  AuthenticationExpiredError,
  byteSizeValueToNumber,
  CancellationToken,
  ReportingError,
} from '@kbn/reporting-common';
import type { TaskRunResult } from '@kbn/reporting-common/types';
import type { ReportingConfigType } from '@kbn/reporting-server';
import { buildEsQuery, Filter } from '@kbn/es-query';
import { zipObject } from 'lodash';
import { i18n } from '@kbn/i18n';

import { CONTENT_TYPE_CSV } from './constants';
import { CsvExportSettings, getExportSettings } from './get_export_settings';
import { i18nTexts } from './i18n_texts';
import { MaxSizeStringBuilder } from './max_size_string_builder';

export interface JobParamsCsvESQL {
  query: { esql: string };
  columns?: string[];
  filters?: Filter[];
  browserTimezone?: string;
}

interface Clients {
  es: IScopedClusterClient;
  data: IScopedSearchClient;
  uiSettings: IUiSettingsClient;
}

export class CsvESQLGenerator {
  private csvContainsFormulas = false;
  private maxSizeReached = false;
  private csvRowCount = 0;

  constructor(
    private job: JobParamsCsvESQL,
    private config: ReportingConfigType['csv'],
    private clients: Clients,
    private cancellationToken: CancellationToken,
    private logger: Logger,
    private stream: Writable
  ) {}

  public async generateData(): Promise<TaskRunResult> {
    const settings = await getExportSettings(
      this.clients.uiSettings,
      this.config,
      this.job.browserTimezone,
      this.logger
    );

    let reportingError: undefined | ReportingError;
    const warnings: string[] = [];

    const { maxSizeBytes, bom, escapeFormulaValues } = settings;
    const builder = new MaxSizeStringBuilder(this.stream, byteSizeValueToNumber(maxSizeBytes), bom);

    const filter =
      this.job.filters &&
      buildEsQuery(
        undefined,
        [],
        this.job.filters,
        getEsQueryConfig(this.clients.uiSettings as Parameters<typeof getEsQueryConfig>[0])
      );

    const searchParams: IKibanaSearchRequest<ESQLSearchParams> = {
      params: {
        query: this.job.query.esql,
        filter,
        // locale can be used for number/date formatting
        locale: i18n.getLocale(),
        // TODO: time_zone support was temporarily removed from ES|QL,
        // we will need to add it back in once it is supported again.
        // https://github.com/elastic/elasticsearch/pull/102767
        // timezone
      },
    };

    try {
      const abortController = new AbortController();
      this.cancellationToken.on(() => abortController.abort());
      const { rawResponse, warning } = await lastValueFrom(
        this.clients.data.search<
          IKibanaSearchRequest<ESQLSearchParams>,
          IKibanaSearchResponse<ESQLSearchReponse>
        >(searchParams, {
          strategy: ESQL_SEARCH_STRATEGY,
          abortSignal: abortController.signal,
          transport: {
            requestTimeout: settings.scroll.duration,
          },
        })
      );

      if (warning) {
        warnings.push(warning);
      }

      const responseColumns = rawResponse.columns?.map(({ name }) => name) ?? [];
      const visibleColumns =
        this.job.columns && this.job.columns.length > 0
          ? this.job.columns.filter((column) => responseColumns.includes(column))
          : responseColumns;

      const rows = rawResponse.values.map((row) => zipObject(responseColumns, row));

      const header =
        Array.from(visibleColumns).map(this.escapeValues(settings)).join(settings.separator) + '\n';
      builder.tryAppend(header);

      await this.generateRows(visibleColumns, rows, builder, settings);
    } catch (err) {
      this.logger.error(err);
      if (err instanceof esErrors.ResponseError) {
        if ([401, 403].includes(err.statusCode ?? 0)) {
          reportingError = new AuthenticationExpiredError();
          warnings.push(i18nTexts.authenticationError.partialResultsMessage);
        } else {
          warnings.push(i18nTexts.esErrorMessage(err.statusCode ?? 0, String(err.body)));
        }
      } else {
        warnings.push(i18nTexts.unknownError(err?.message ?? err));
      }
    }

    return {
      content_type: CONTENT_TYPE_CSV,
      csv_contains_formulas: this.csvContainsFormulas && !escapeFormulaValues,
      max_size_reached: this.maxSizeReached,
      metrics: {
        csv: { rows: this.csvRowCount },
      },
      warnings,
      error_code: reportingError?.code,
    };
  }

  /*
   * Format a Datatable into rows of CSV content
   */
  private async generateRows(
    columns: string[],
    rows: Array<Record<string, unknown>>,
    builder: MaxSizeStringBuilder,
    settings: CsvExportSettings
  ) {
    this.logger.debug(`Building ${rows.length} CSV data rows`);
    for (const dataTableRow of rows) {
      if (this.cancellationToken.isCancelled()) {
        break;
      }

      /*
       * Intrinsically, generating the rows is a synchronous process. Awaiting
       * on a setImmediate call here partitions what could be a very long and
       * CPU-intensive synchronous process into an asynchronous process. This
       * gives NodeJS a chance to process other asynchronous events that wait
       * on the Event Loop.
       *
       * See: https://nodejs.org/en/docs/guides/dont-block-the-event-loop/
       *
       * It's likely this creates a lot of context switching, and adds to the
       * time it would take to generate the CSV. There are alternatives to the
       * chosen performance solution:
       *
       * 1. Partition the synchronous process with fewer partitions, by using
       * the loop counter to call setImmediate only every N amount of rows.
       * Testing is required to see what the best N value for most data will
       * be.
       *
       * 2. Use a C++ add-on to generate the CSV using the Node Worker Pool
       * instead of using the Event Loop
       */
      await new Promise(setImmediate);

      const rowDefinition: string[] = [];
      const escape = this.escapeValues(settings);

      for (const column of columns) {
        let formattedValue: string = escape(`${dataTableRow[column]}`);
        if (formattedValue === 'null') formattedValue = '';
        if (formattedValue === 'undefined') formattedValue = '';
        rowDefinition.push(formattedValue);
      }

      if (!builder.tryAppend(rowDefinition.join(settings.separator) + '\n')) {
        this.logger.warn(`Max Size Reached after ${this.csvRowCount} rows.`);
        this.maxSizeReached = true;
        if (this.cancellationToken) {
          this.cancellationToken.cancel();
        }
        break;
      }

      this.csvRowCount++;
    }
  }

  private escapeValues(settings: CsvExportSettings) {
    return (value: string) => {
      if (settings.checkForFormulas && cellHasFormulas(value)) {
        this.csvContainsFormulas = true; // set warning if cell value has a formula
      }
      return settings.escapeValue(value);
    };
  }
}
```
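To make the filter handling above concrete: when the job carries `filters`, `buildEsQuery` folds them into a single `bool` query that is sent to the `esql` search strategy alongside the query string. A small sketch (the range filter is an example value; the resulting shape matches the 'passes filters to the query' unit test above):

```ts
import { buildEsQuery, Filter } from '@kbn/es-query';

const filters: Filter[] = [
  {
    meta: {},
    query: {
      range: {
        '@timestamp': { format: 'strict_date_optional_time', gte: 'now-15m', lte: 'now' },
      },
    },
  },
];

// Yields { bool: { filter: [{ range: { '@timestamp': { ... } } }], must: [], must_not: [], should: [] } },
// which becomes the `filter` param of the ES|QL search request.
const filter = buildEsQuery(undefined, [], filters);
```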
```diff
@@ -26,5 +26,7 @@
     "@kbn/reporting-common",
     "@kbn/reporting-server",
     "@kbn/reporting-export-types-csv-common",
+    "@kbn/es-query",
+    "@kbn/es-types",
   ]
 }
```
```diff
@@ -12,7 +12,7 @@ import { Writable } from 'stream';
 import type { KibanaRequest } from '@kbn/core/server';
 import type { DataPluginStart } from '@kbn/data-plugin/server/plugin';
 import type { DiscoverServerPluginStart } from '@kbn/discover-plugin/server';
-import { CsvGenerator } from '@kbn/generate-csv';
+import { CsvGenerator, CsvESQLGenerator } from '@kbn/generate-csv';
 import {
   CancellationToken,
   LICENSE_TYPE_BASIC,
```

```diff
@@ -88,14 +88,10 @@ export class CsvV2ExportType extends ExportType<
       throw Boom.badRequest('Invalid Job params: must contain a single Discover App locator');
     }
 
-    if (!params || !params.savedSearchId || typeof params.savedSearchId !== 'string') {
-      throw Boom.badRequest('Invalid Discover App locator: must contain a savedSearchId');
-    }
-
     // use Discover contract to get the title of the report from job params
     const { discover: discoverPluginStart } = this.startDeps;
     const locatorClient = await discoverPluginStart.locator.asScopedClient(req);
-    const title = await locatorClient.titleFromLocator(params);
+    const title = jobParams.title || (await locatorClient.titleFromLocator(params));
 
     return { ...jobParams, title, objectType: 'search', isDeprecated: false };
   };
```

```diff
@@ -124,6 +120,38 @@ export class CsvV2ExportType extends ExportType<
 
     // use Discover contract to convert the job params into inputs for CsvGenerator
     const locatorClient = await discoverPluginStart.locator.asScopedClient(fakeRequest);
+
+    const query = await locatorClient.queryFromLocator(params);
+
+    if (query && 'esql' in query) {
+      // TODO: use columnsFromLocator
+      // currently locatorClient.columnsFromLocator can only extract columns from the saved search,
+      // but for the es|ql we simply want to get currently visible columns from params.columns.
+      // we didn't want to add this change inside locatorClient.columnsFromLocator, as it would change the behaviour of csv_v2 for non-ES|QL export,
+      // this should be addressed here https://github.com/elastic/kibana/issues/151190
+      // const columns = await locatorClient.columnsFromLocator(params);
+      const columns = params.columns as string[] | undefined;
+      const filters = await locatorClient.filtersFromLocator(params);
+      const es = this.startDeps.esClient.asScoped(fakeRequest);
+
+      const clients = { uiSettings, data, es };
+
+      const csv = new CsvESQLGenerator(
+        {
+          columns,
+          query,
+          filters,
+          ...job,
+        },
+        csvConfig,
+        clients,
+        cancellationToken,
+        logger,
+        stream
+      );
+      return await csv.generateData();
+    }
+
     const columns = await locatorClient.columnsFromLocator(params);
     const searchSource = await locatorClient.searchSourceFromLocator(params);
```
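The `'esql' in query` check above is the whole routing decision: the locator's query may be a classic `Query` or an `AggregateQuery`, and only the latter carries an `esql` string. A hedged sketch of that discrimination (the helper name is ours, not from the PR):

```ts
import type { AggregateQuery, Query } from '@kbn/es-query';

// Hypothetical helper: narrows the union the same way csv_v2's runTask does.
function isEsqlQuery(query: Query | AggregateQuery | undefined): query is { esql: string } {
  return !!query && 'esql' in query;
}
```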
```diff
@@ -20,7 +20,7 @@ import { zipObject } from 'lodash';
 import { Observable, defer, throwError } from 'rxjs';
 import { catchError, map, switchMap, tap } from 'rxjs/operators';
 import { buildEsQuery } from '@kbn/es-query';
-import type { ESQLSearchReponse } from '@kbn/es-types';
+import type { ESQLSearchReponse, ESQLSearchParams } from '@kbn/es-types';
 import { getEsQueryConfig } from '../../es_query';
 import { getTime } from '../../query';
 import { ESQL_SEARCH_STRATEGY, IKibanaSearchRequest, ISearchGeneric, KibanaContext } from '..';
```

```diff
@@ -81,16 +81,6 @@ function extractTypeAndReason(attributes: any): { type?: string; reason?: string
   return {};
 }
 
-interface ESQLSearchParams {
-  // TODO: time_zone support was temporarily removed from ES|QL,
-  // we will need to add it back in once it is supported again.
-  // https://github.com/elastic/elasticsearch/pull/102767
-  // time_zone?: string;
-  query: string;
-  filter?: unknown;
-  locale?: string;
-}
-
 export const getEsqlFn = ({ getStartDependencies }: EsqlFnArguments) => {
   const essql: EsqlExpressionFunctionDefinition = {
     name: 'esql',
```
```diff
@@ -48,3 +48,4 @@ export * from './es_raw_response';
 export * from './eql_raw_response';
 export * from './esdsl';
 export * from './eql';
+export * from './esql';
```
```diff
@@ -12,8 +12,6 @@ import { getKbnSearchError, KbnSearchError } from '../../report_search_error';
 import type { ISearchStrategy } from '../../types';
 import { sanitizeRequestParams } from '../../sanitize_request_params';
 
-const ES_TIMEOUT_IN_MS = 120000;
-
 export const esqlSearchStrategyProvider = (
   logger: Logger,
   useInternalUser: boolean = false
```

```diff
@@ -26,17 +24,6 @@ export const esqlSearchStrategyProvider = (
    * @returns `Observable<IEsSearchResponse<any>>`
    */
   search: (request, { abortSignal, ...options }, { esClient, uiSettingsClient }) => {
-    const abortController = new AbortController();
-    // We found out that there are cases where we are not aborting correctly
-    // For this reasons we want to manually cancel he abort signal after 2 mins
-
-    abortSignal?.addEventListener('abort', () => {
-      abortController.abort();
-    });
-
-    // Also abort after two mins
-    setTimeout(() => abortController.abort(), ES_TIMEOUT_IN_MS);
-
     // Only default index pattern type is supported here.
     // See ese for other type support.
     if (request.indexType) {
```

```diff
@@ -55,10 +42,11 @@ export const esqlSearchStrategyProvider = (
           },
         },
         {
-          signal: abortController.signal,
+          signal: abortSignal,
           meta: true,
           // we don't want the ES client to retry (default value is 3)
           maxRetries: 0,
+          requestTimeout: options.transport?.requestTimeout,
         }
       );
       return {
```
```diff
@@ -122,7 +122,7 @@ export const getTopNavLinks = ({
     run: async (anchorElement: HTMLElement) => {
       if (!services.share) return;
       const savedSearch = state.savedSearchState.getState();
-      const sharingData = await getSharingData(
+      const searchSourceSharingData = await getSharingData(
         savedSearch.searchSource,
         state.appState.getState(),
         services,
```

```diff
@@ -183,7 +183,9 @@ export const getTopNavLinks = ({
         objectId: savedSearch.id,
         objectType: 'search',
         sharingData: {
-          ...sharingData,
+          isTextBased,
+          locatorParams: [{ id: locator.id, params }],
+          ...searchSourceSharingData,
+          // CSV reports can be generated without a saved search so we provide a fallback title
           title:
             savedSearch.title ||
```
```diff
@@ -8,7 +8,13 @@
 
 import { KibanaRequest, PluginInitializerContext } from '@kbn/core/server';
 import { DataPluginStart } from '@kbn/data-plugin/server/plugin';
-import { ColumnsFromLocatorFn, SearchSourceFromLocatorFn, TitleFromLocatorFn } from './locator';
+import {
+  ColumnsFromLocatorFn,
+  SearchSourceFromLocatorFn,
+  TitleFromLocatorFn,
+  QueryFromLocatorFn,
+  FiltersFromLocatorFn,
+} from './locator';
 
 export interface DiscoverServerPluginStartDeps {
   data: DataPluginStart;
```

```diff
@@ -18,6 +24,8 @@ export interface LocatorServiceScopedClient {
   columnsFromLocator: ColumnsFromLocatorFn;
   searchSourceFromLocator: SearchSourceFromLocatorFn;
   titleFromLocator: TitleFromLocatorFn;
+  queryFromLocator: QueryFromLocatorFn;
+  filtersFromLocator: FiltersFromLocatorFn;
 }
 
 export interface DiscoverServerPluginLocatorService {
```
src/plugins/discover/server/locator/filters_from_locator.ts (new file, 55 lines)
```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { Filter } from '@kbn/es-query';
import { LocatorServicesDeps } from '.';
import { DiscoverAppLocatorParams } from '../../common';

/**
 * @internal
 */
export const filtersFromLocatorFactory = (services: LocatorServicesDeps) => {
  /**
   * @public
   */
  const filtersFromLocator = async (params: DiscoverAppLocatorParams): Promise<Filter[]> => {
    const filters: Filter[] = [];

    if (params.timeRange && params.dataViewSpec?.timeFieldName) {
      const timeRange = params.timeRange;
      const timeFieldName = params.dataViewSpec.timeFieldName;

      if (timeRange) {
        filters.push({
          meta: {},
          query: {
            range: {
              [timeFieldName]: {
                format: 'strict_date_optional_time',
                gte: timeRange.from,
                lte: timeRange.to,
              },
            },
          },
        });
      }
    }

    if (params.filters) {
      filters.push(...params.filters);
    }

    return filters;

    // TODO: support extracting filters from saved search
  };

  return filtersFromLocator;
};

export type FiltersFromLocatorFn = ReturnType<typeof filtersFromLocatorFactory>;
```
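A worked example of what the factory produces: given locator params with a time range and a data view spec that names its time field, the scoped client resolves to a single range filter (the values here are illustrative):

```ts
// Input locator params (illustrative values):
const params = {
  timeRange: { from: '2015-09-18T22:00:00.000Z', to: '2015-09-23T22:00:00.000Z' },
  dataViewSpec: { timeFieldName: '@timestamp' },
};

// filtersFromLocator(params) then resolves to:
const expected = [
  {
    meta: {},
    query: {
      range: {
        '@timestamp': {
          format: 'strict_date_optional_time',
          gte: '2015-09-18T22:00:00.000Z',
          lte: '2015-09-23T22:00:00.000Z',
        },
      },
    },
  },
];
```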
```diff
@@ -14,6 +14,8 @@ import { getScopedClient } from './service';
 export type { ColumnsFromLocatorFn } from './columns_from_locator';
 export type { SearchSourceFromLocatorFn } from './searchsource_from_locator';
 export type { TitleFromLocatorFn } from './title_from_locator';
+export type { QueryFromLocatorFn } from './query_from_locator';
+export type { FiltersFromLocatorFn } from './filters_from_locator';
 
 /**
  * @internal
```
```diff
@@ -7,7 +7,8 @@
  */
 
 import { KibanaRequest } from '@kbn/core/server';
-import { SearchSource } from '@kbn/data-plugin/common';
+import { Query, SearchSource } from '@kbn/data-plugin/common';
+import { AggregateQuery, Filter } from '@kbn/es-query';
 import { createSearchSourceMock } from '@kbn/data-plugin/common/search/search_source/mocks';
 import { DiscoverServerPluginLocatorService, LocatorServiceScopedClient } from '..';
 import { DiscoverAppLocatorParams } from '../../common';
```

```diff
@@ -27,6 +28,14 @@ export const createLocatorServiceMock = (): DiscoverServerPluginLocatorService =
     .fn<Promise<string>, [DiscoverAppLocatorParams]>()
     .mockResolvedValue('mock search title');
 
+  const queryFromLocatorMock = jest
+    .fn<Promise<Query | AggregateQuery | undefined>, [DiscoverAppLocatorParams]>()
+    .mockResolvedValue(undefined);
+
+  const filtersFromLocatorMock = jest
+    .fn<Promise<Filter[]>, [DiscoverAppLocatorParams]>()
+    .mockResolvedValue([]);
+
   return {
     asScopedClient: jest
       .fn<Promise<LocatorServiceScopedClient>, [req: KibanaRequest]>()
```

```diff
@@ -35,6 +44,8 @@ export const createLocatorServiceMock = (): DiscoverServerPluginLocatorService =
         columnsFromLocator: columnsFromLocatorMock,
         searchSourceFromLocator: searchSourceFromLocatorMock,
         titleFromLocator: titleFromLocatorMock,
+        queryFromLocator: queryFromLocatorMock,
+        filtersFromLocator: filtersFromLocatorMock,
       } as LocatorServiceScopedClient);
     }),
 };
```
src/plugins/discover/server/locator/query_from_locator.ts (new file, 31 lines)
```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

import { AggregateQuery, Query } from '@kbn/es-query';
import { LocatorServicesDeps } from '.';
import { DiscoverAppLocatorParams } from '../../common';

/**
 * @internal
 */
export const queryFromLocatorFactory = (services: LocatorServicesDeps) => {
  /**
   * @public
   */
  const queryFromLocator = async (
    params: DiscoverAppLocatorParams
  ): Promise<Query | AggregateQuery | undefined> => {
    return params.query;

    // TODO: support query from saved search
  };

  return queryFromLocator;
};

export type QueryFromLocatorFn = ReturnType<typeof queryFromLocatorFactory>;
```
```diff
@@ -11,6 +11,8 @@ import { DiscoverServerPluginLocatorService, DiscoverServerPluginStartDeps } fro
 import { columnsFromLocatorFactory } from './columns_from_locator';
 import { searchSourceFromLocatorFactory } from './searchsource_from_locator';
 import { titleFromLocatorFactory } from './title_from_locator';
+import { queryFromLocatorFactory } from './query_from_locator';
+import { filtersFromLocatorFactory } from './filters_from_locator';
 
 export const getScopedClient = (
   core: CoreStart,
```

```diff
@@ -27,6 +29,8 @@ export const getScopedClient = (
       columnsFromLocator: columnsFromLocatorFactory(services),
       searchSourceFromLocator: searchSourceFromLocatorFactory(services),
       titleFromLocator: titleFromLocatorFactory(services),
+      queryFromLocator: queryFromLocatorFactory(services),
+      filtersFromLocator: filtersFromLocatorFactory(services),
     };
   },
 };
```
```diff
@@ -8,7 +8,7 @@
 import { i18n } from '@kbn/i18n';
 import React from 'react';
 
-import { CSV_JOB_TYPE } from '@kbn/reporting-export-types-csv-common';
+import { CSV_JOB_TYPE, CSV_JOB_TYPE_V2 } from '@kbn/reporting-export-types-csv-common';
 
 import type { SearchSourceFields } from '@kbn/data-plugin/common';
 import { ShareContext, ShareMenuProvider } from '@kbn/share-plugin/public';
```

```diff
@@ -30,6 +30,10 @@ export const reportingCsvShareProvider = ({
     return [];
   }
 
+  // only csv v2 supports esql (isTextBased) reports
+  // TODO: whole csv reporting should move to v2 https://github.com/elastic/kibana/issues/151190
+  const reportType = sharingData.isTextBased ? CSV_JOB_TYPE_V2 : CSV_JOB_TYPE;
+
   const getSearchSource = sharingData.getSearchSource as ({
     addGlobalTimeFilter,
     absoluteTime,
```

```diff
@@ -41,12 +45,21 @@ export const reportingCsvShareProvider = ({
   const jobParams = {
     title: sharingData.title as string,
     objectType,
-    columns: sharingData.columns as string[] | undefined,
   };
 
   const getJobParams = (forShareUrl?: boolean) => {
+    if (reportType === CSV_JOB_TYPE_V2) {
+      // csv v2 uses locator params
+      return {
+        ...jobParams,
+        locatorParams: sharingData.locatorParams as [Record<string, unknown>],
+      };
+    }
+
+    // csv v1 uses search source and columns
     return {
       ...jobParams,
+      columns: sharingData.columns as string[] | undefined,
       searchSource: getSearchSource({
         addGlobalTimeFilter: true,
         absoluteTime: !forShareUrl,
```

```diff
@@ -92,7 +105,7 @@ export const reportingCsvShareProvider = ({
           apiClient={apiClient}
           toasts={toasts}
           uiSettings={uiSettings}
-          reportType={CSV_JOB_TYPE}
+          reportType={reportType}
           layoutId={undefined}
           objectId={objectId}
           getJobParams={getJobParams}
```
```diff
@@ -8,7 +8,7 @@
 import React, { Component, ReactElement } from 'react';
 import url from 'url';
 
-import { CSV_REPORT_TYPE } from '@kbn/reporting-export-types-csv-common';
+import { CSV_REPORT_TYPE, CSV_REPORT_TYPE_V2 } from '@kbn/reporting-export-types-csv-common';
 import { PDF_REPORT_TYPE, PDF_REPORT_TYPE_V2 } from '@kbn/reporting-export-types-pdf-common';
 import { PNG_REPORT_TYPE, PNG_REPORT_TYPE_V2 } from '@kbn/reporting-export-types-png-common';
 
```

```diff
@@ -251,7 +251,8 @@ class ReportingPanelContentUi extends Component<Props, State> {
       case PDF_REPORT_TYPE_V2:
         return 'PDF';
       case CSV_REPORT_TYPE:
-        return 'csv';
+      case CSV_REPORT_TYPE_V2:
+        return 'CSV';
       case 'png':
       case PNG_REPORT_TYPE_V2:
         return PNG_REPORT_TYPE;
```
```diff
@@ -1649,6 +1649,18 @@ exports[`discover Discover CSV Export Generate CSV: archived search generates a
 "
 `;
 
+exports[`discover Discover CSV Export Generate CSV: new search generate a report using ES|QL 1`] = `
+"\\"total_sales\\",\\"day_of_week\\"
+\\"58215.58984375\\",Friday
+\\"57807.375\\",Thursday
+\\"53841.03515625\\",Saturday
+\\"45850.0546875\\",Sunday
+\\"45410.2890625\\",Monday
+\\"45080.90625\\",Wednesday
+\\"44678.87890625\\",Tuesday
+"
+`;
+
 exports[`discover Discover CSV Export Generate CSV: new search generates a large export 1`] = `
 "\\"_id\\",\\"_index\\",\\"_score\\",category,\\"category.keyword\\",currency,\\"customer_first_name\\",\\"customer_first_name.keyword\\",\\"customer_full_name\\",\\"customer_full_name.keyword\\",\\"customer_gender\\",\\"customer_id\\",\\"customer_last_name\\",\\"customer_last_name.keyword\\",\\"customer_phone\\",\\"day_of_week\\",\\"day_of_week_i\\",email,\\"geoip.city_name\\",\\"geoip.continent_name\\",\\"geoip.country_iso_code\\",\\"geoip.location\\",\\"geoip.region_name\\",manufacturer,\\"manufacturer.keyword\\",\\"order_date\\",\\"order_id\\",\\"products._id\\",\\"products._id.keyword\\",\\"products.base_price\\",\\"products.base_unit_price\\",\\"products.category\\",\\"products.category.keyword\\",\\"products.created_on\\",\\"products.discount_amount\\",\\"products.discount_percentage\\",\\"products.manufacturer\\",\\"products.manufacturer.keyword\\",\\"products.min_price\\",\\"products.price\\",\\"products.product_id\\",\\"products.product_name\\",\\"products.product_name.keyword\\",\\"products.quantity\\",\\"products.sku\\",\\"products.tax_amount\\",\\"products.taxful_price\\",\\"products.taxless_price\\",\\"products.unit_discount_amount\\",sku,\\"taxful_total_price\\",\\"taxless_total_price\\",\\"total_quantity\\",\\"total_unique_products\\",type,user
 3AMtOW0BH63Xcmy432DJ,ecommerce,\\"-\\",\\"Men's Shoes, Men's Clothing, Women's Accessories, Men's Accessories\\",\\"Men's Shoes, Men's Clothing, Women's Accessories, Men's Accessories\\",EUR,\\"Sultan Al\\",\\"Sultan Al\\",\\"Sultan Al Boone\\",\\"Sultan Al Boone\\",MALE,19,Boone,Boone,\\"(empty)\\",Saturday,5,\\"sultan al@boone-family.zzz\\",\\"Abu Dhabi\\",Asia,AE,\\"POINT (54.4 24.5)\\",\\"Abu Dhabi\\",\\"Angeldale, Oceanavigations, Microlutions\\",\\"Angeldale, Oceanavigations, Microlutions\\",\\"Jul 12, 2019 @ 00:00:00.000\\",716724,\\"sold_product_716724_23975, sold_product_716724_6338, sold_product_716724_14116, sold_product_716724_15290\\",\\"sold_product_716724_23975, sold_product_716724_6338, sold_product_716724_14116, sold_product_716724_15290\\",\\"80, 60, 21.984, 11.992\\",\\"80, 60, 21.984, 11.992\\",\\"Men's Shoes, Men's Clothing, Women's Accessories, Men's Accessories\\",\\"Men's Shoes, Men's Clothing, Women's Accessories, Men's Accessories\\",\\"Dec 31, 2016 @ 00:00:00.000, Dec 31, 2016 @ 00:00:00.000, Dec 31, 2016 @ 00:00:00.000, Dec 31, 2016 @ 00:00:00.000\\",\\"0, 0, 0, 0\\",\\"0, 0, 0, 0\\",\\"Angeldale, Oceanavigations, Microlutions, Oceanavigations\\",\\"Angeldale, Oceanavigations, Microlutions, Oceanavigations\\",\\"42.375, 33, 10.344, 6.109\\",\\"80, 60, 21.984, 11.992\\",\\"23,975, 6,338, 14,116, 15,290\\",\\"Winter boots - cognac, Trenchcoat - black, Watch - black, Hat - light grey multicolor\\",\\"Winter boots - cognac, Trenchcoat - black, Watch - black, Hat - light grey multicolor\\",\\"1, 1, 1, 1\\",\\"ZO0687606876, ZO0290502905, ZO0126701267, ZO0308503085\\",\\"0, 0, 0, 0\\",\\"80, 60, 21.984, 11.992\\",\\"80, 60, 21.984, 11.992\\",\\"0, 0, 0, 0\\",\\"ZO0687606876, ZO0290502905, ZO0126701267, ZO0308503085\\",174,174,4,4,order,sultan
```
```diff
@@ -18,7 +18,15 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
   const kibanaServer = getService('kibanaServer');
   const browser = getService('browser');
   const retry = getService('retry');
-  const PageObjects = getPageObjects(['reporting', 'common', 'discover', 'timePicker', 'share']);
+  const PageObjects = getPageObjects([
+    'reporting',
+    'common',
+    'discover',
+    'timePicker',
+    'share',
+    'header',
+  ]);
   const monacoEditor = getService('monacoEditor');
   const filterBar = getService('filterBar');
   const find = getService('find');
+  const testSubjects = getService('testSubjects');
```

```diff
@@ -170,6 +178,22 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
       expectSnapshot(csvFile.slice(0, 5000)).toMatch();
       expectSnapshot(csvFile.slice(-5000)).toMatch();
     });
+
+    it('generate a report using ES|QL', async () => {
+      await PageObjects.discover.selectTextBaseLang();
+      const testQuery = `from ecommerce | STATS total_sales = SUM(taxful_total_price) BY day_of_week | SORT total_sales DESC`;
+
+      await monacoEditor.setCodeEditorValue(testQuery);
+      await testSubjects.click('querySubmitButton');
+      await PageObjects.header.waitUntilLoadingHasFinished();
+
+      const res = await getReport();
+      expect(res.status).to.equal(200);
+      expect(res.get('content-type')).to.equal('text/csv; charset=utf-8');
+
+      const csvFile = res.text;
+      expectSnapshot(csvFile).toMatch();
+    });
   });
 
   describe('Generate CSV: sparse data', () => {
```
x-pack/test/reporting_api_integration/reporting_and_security/__snapshots__/csv_v2_esql.snap (generated, new file, 40 lines)
```
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Reporting APIs CSV Generation from ES|QL export from non-timebased data view csv from es|ql csv file matches 1`] = `
"eon,epoch,era,period
Phanerozoic,\\" Pliocene\\",Cenozoic,Neogene
Phanerozoic,\\" Holocene\\",Cenozoic,Quaternary
Phanerozoic,,Mesozoic,Cretaceous
Phanerozoic,,Mesozoic,Jurassic
Phanerozoic,,Paleozoic,Cambrian
Proterozoic,,Paleozoic,Permian
Archean,,,
Hadean,,,
"
`;

exports[`Reporting APIs CSV Generation from ES|QL export from non-timebased data view csv from es|ql job response data is correct 1`] = `
Object {
  "contentDisposition": "attachment; filename=CSV%20Report.csv",
  "contentType": "text/csv; charset=utf-8",
  "title": "CSV Report",
}
`;

exports[`Reporting APIs CSV Generation from ES|QL export from timebased data view csv from es|ql export with time filter csv file matches 1`] = `
"\\"@message\\"
\\"143.84.142.7 - - [2015-09-20T00:00:00.000Z] \\"\\"GET /uploads/steven-hawley.jpg HTTP/1.1\\"\\" 200 1623 \\"\\"-\\"\\" \\"\\"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)\\"\\"\\"
\\"193.164.192.47 - - [2015-09-20T00:30:34.206Z] \\"\\"GET /uploads/michael-foreman.jpg HTTP/1.1\\"\\" 200 8537 \\"\\"-\\"\\" \\"\\"Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1\\"\\"\\"
\\"176.7.244.68 - - [2015-09-20T00:32:42.058Z] \\"\\"GET /uploads/james-pawelczyk.jpg HTTP/1.1\\"\\" 200 9196 \\"\\"-\\"\\" \\"\\"Mozilla/5.0 (X11; Linux i686) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/11.0.696.50 Safari/534.24\\"\\"\\"
\\"237.56.90.184 - - [2015-09-20T00:35:21.445Z] \\"\\"GET /uploads/david-leestma.jpg HTTP/1.1\\"\\" 200 9790 \\"\\"-\\"\\" \\"\\"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322)\\"\\"\\"
\\"255.56.89.50 - - [2015-09-20T00:43:01.353Z] \\"\\"GET /uploads/michael-r-barratt.jpg HTTP/1.1\\"\\" 200 9583 \\"\\"-\\"\\" \\"\\"Mozilla/5.0 (X11; Linux x86_64; rv:6.0a1) Gecko/20110421 Firefox/6.0a1\\"\\"\\"
"
`;

exports[`Reporting APIs CSV Generation from ES|QL export from timebased data view csv from es|ql export with time filter job response data is correct 1`] = `
Object {
  "contentDisposition": "attachment; filename=Untitled%20discover%20search.csv",
  "contentType": "text/csv; charset=utf-8",
  "title": "Untitled discover search",
}
`;
```
x-pack/test/reporting_api_integration/reporting_and_security/csv_v2_esql.ts (new file, 211 lines)

```ts
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

import expect from '@kbn/expect';
import request from 'supertest';

import { DISCOVER_APP_LOCATOR } from '@kbn/discover-plugin/common';
import type { JobParamsCsvFromSavedObject } from '@kbn/reporting-export-types-csv-common';
import type { ReportApiJSON } from '@kbn/reporting-common/types';
import rison from '@kbn/rison';
import { FtrProviderContext } from '../ftr_provider_context';

// eslint-disable-next-line import/no-default-export
export default ({ getService }: FtrProviderContext) => {
  const es = getService('es');
  const supertest = getService('supertest');
  const esArchiver = getService('esArchiver');
  const reportingAPI = getService('reportingAPI');
  const log = getService('log');

  const requestCsv = async (
    params: Omit<JobParamsCsvFromSavedObject, 'objectType' | 'browserTimezone' | 'version'>
  ) => {
    const job: JobParamsCsvFromSavedObject = {
      browserTimezone: (params as JobParamsCsvFromSavedObject).browserTimezone ?? 'UTC',
      objectType: 'search',
      version: '8.13.0',
      title: 'CSV Report',
      ...params,
    };
    log.info(`sending request for query: ${job.locatorParams[0].params.query}`);
    const jobParams = rison.encode(job);
    return await supertest
      .post(`/api/reporting/generate/csv_v2`)
      .set('kbn-xsrf', 'xxx')
      .send({ jobParams });
  };

  describe('CSV Generation from ES|QL', () => {
    describe('export from non-timebased data view', () => {
      const timelessIndexName = 'timeless-test';
      const loadTimelessData = async () => {
        log.info(`loading test data`);
        await es.indices.create({
          index: timelessIndexName,
          body: {
            settings: { number_of_shards: 1 },
            mappings: {
              properties: {
                eon: { type: 'keyword' },
                era: { type: 'keyword' },
                period: { type: 'keyword' },
                epoch: { type: 'keyword' },
              },
            },
          },
        });
        await es.bulk({
          refresh: 'wait_for',
          body: [
            { index: { _index: timelessIndexName, _id: 'tvJJX4UBvD7uFsw9L2x4' } },
            { eon: 'Phanerozoic', era: 'Cenozoic', period: 'Neogene', epoch: ' Pliocene' },
            { index: { _index: timelessIndexName, _id: 't_JJX4UBvD7uFsw9L2x4' } },
            { eon: 'Phanerozoic', era: 'Cenozoic', period: 'Quaternary', epoch: ' Holocene' },
            { index: { _index: timelessIndexName, _id: 'uPJJX4UBvD7uFsw9L2x4' } },
            { eon: 'Phanerozoic', era: 'Mesozoic', period: 'Cretaceous' },
            { index: { _index: timelessIndexName, _id: 'ufJJX4UBvD7uFsw9L2x4' } },
            { eon: 'Phanerozoic', era: 'Mesozoic', period: 'Jurassic' },
            { index: { _index: timelessIndexName, _id: 'uvJJX4UBvD7uFsw9L2x4' } },
            { eon: 'Phanerozoic', era: 'Paleozoic', period: 'Cambrian' },
            { index: { _index: timelessIndexName, _id: 'u_JJX4UBvD7uFsw9L2x4' } },
            { eon: 'Proterozoic', era: 'Paleozoic', period: 'Permian' },
            { index: { _index: timelessIndexName, _id: 'vPJJX4UBvD7uFsw9L2x4' } },
            { eon: 'Archean' },
            { index: { _index: timelessIndexName, _id: 'vfJJX4UBvD7uFsw9L2x4' } },
            { eon: 'Hadean' },
          ],
        });
      };

      before(async () => {
        await loadTimelessData();
      });

      after(async () => {
        await es.indices.delete({
          index: timelessIndexName,
        });
      });

      describe('csv from es|ql', () => {
        let response: request.Response;
        let job: ReportApiJSON;
        let path: string;
        let csvFile: string;

        before(async () => {
          const { text, status } = await requestCsv({
            locatorParams: [
              {
                id: DISCOVER_APP_LOCATOR,
                version: 'reporting',
                params: {
                  query: { esql: `from ${timelessIndexName} | limit 10` },
                },
              },
            ],
          });
          expect(status).to.eql(200);
          ({ job, path } = JSON.parse(text));
          await reportingAPI.waitForJobToFinish(path);
          response = await supertest.get(path);
          csvFile = response.text;
        });

        it('job response data is correct', () => {
          expect(path).to.be.a('string');
          expect(job).to.be.an('object');
          expect(job.attempts).equal(0);
          expectSnapshot({
            contentType: response.header['content-type'],
            contentDisposition: response.header['content-disposition'],
            title: job.payload.title,
          }).toMatch();
        });

        it('csv file matches', () => {
          expectSnapshot(csvFile).toMatch();
        });
      });
    });

    describe('export from timebased data view', () => {
      const LOGSTASH_DATA_ARCHIVE = 'test/functional/fixtures/es_archiver/logstash_functional';
      before(async () => {
        log.info(`loading archives and fixtures`);
        await esArchiver.load(LOGSTASH_DATA_ARCHIVE);
      });

      after(async () => {
        log.info(`unloading archives and fixtures`);
        await esArchiver.unload(LOGSTASH_DATA_ARCHIVE);
      });

      describe('csv from es|ql', () => {
        describe('export with time filter', () => {
          let response: request.Response;
          let job: ReportApiJSON;
          let path: string;
          let csvFile: string;

          before(async () => {
            const { text, status } = await requestCsv({
              locatorParams: [
                {
                  id: 'DISCOVER_APP_LOCATOR',
                  version: '8.13.0',
                  params: {
                    columns: ['@message'],
                    dataViewSpec: {
                      allowHidden: false,
                      allowNoIndex: false,
                      fieldFormats: {},
                      id: '0ed8b65f-ec8f-4061-9d2e-542cd6ff10a6',
                      name: 'logstash-*',
                      runtimeFieldMap: {},
                      sourceFilters: [],
                      timeFieldName: '@timestamp',
                      title: 'logstash-*',
                    },
                    filters: [],
                    index: '0ed8b65f-ec8f-4061-9d2e-542cd6ff10a6',
                    interval: 'auto',
                    query: { esql: 'from logstash-* | sort @timestamp | limit 5' },
                    refreshInterval: { pause: true, value: 60000 },
                    sort: [['@timestamp', 'desc']],
                    timeRange: { from: '2015-09-18T22:00:00.000Z', to: '2015-09-23T22:00:00.000Z' },
                  },
                },
              ],
              title: 'Untitled discover search',
            });
            expect(status).to.eql(200);
            ({ job, path } = JSON.parse(text));
            await reportingAPI.waitForJobToFinish(path);
            response = await supertest.get(path);
            csvFile = response.text;
          });

          it('job response data is correct', () => {
            expect(path).to.be.a('string');
            expect(job).to.be.an('object');
            expect(job.attempts).equal(0);
            expectSnapshot({
              contentType: response.header['content-type'],
              contentDisposition: response.header['content-disposition'],
              title: job.payload.title,
            }).toMatch();
          });

          it('csv file matches', () => {
            expectSnapshot(csvFile).toMatch();
          });
        });
      });
    });
  });
};
```
```diff
@@ -24,6 +24,7 @@ export default function ({ getService, loadTestFile }: FtrProviderContext) {
     loadTestFile(require.resolve('./download_csv_dashboard'));
     loadTestFile(require.resolve('./generate_csv_discover'));
     loadTestFile(require.resolve('./csv_v2'));
+    loadTestFile(require.resolve('./csv_v2_esql'));
     loadTestFile(require.resolve('./network_policy'));
     loadTestFile(require.resolve('./spaces'));
     loadTestFile(require.resolve('./usage'));
```