mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
[Reporting] Add max concurrent shards setting to schema (#170344)
## Summary

Closes https://github.com/elastic/kibana/issues/161561

This PR exposes the `max_concurrent_shards` setting in the schema to customers for point-in-time CSV report generation.

### Checklist

- [x] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios

---------

Co-authored-by: Tim Sullivan <tsullivan@users.noreply.github.com>
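For illustration only, here is a minimal standalone sketch (using `@kbn/config-schema`; the names and setup are assumptions, not the PR's actual `CsvSchema`) of how a `defaultValue` on the new `maxConcurrentShardRequests` entry behaves when the setting is omitted or overridden:

```ts
import { schema } from '@kbn/config-schema';

// Hypothetical, simplified shape of the CSV reporting config for this sketch.
const csvConfigSketch = schema.object({
  scroll: schema.object({
    duration: schema.string({ defaultValue: '30s' }),
    size: schema.number({ defaultValue: 500 }),
  }),
  maxConcurrentShardRequests: schema.number({ defaultValue: 5 }),
});

// With nothing configured, validation fills in the defaults.
const defaults = csvConfigSketch.validate({});
// => { scroll: { duration: '30s', size: 500 }, maxConcurrentShardRequests: 5 }

// A customer can override the value, e.g. to reduce shard fan-out per search.
const tuned = csvConfigSketch.validate({ maxConcurrentShardRequests: 2 });
console.log(defaults, tuned);
```

In a deployment the value would be set through the CSV section of the reporting config in `kibana.yml`; the exact key path depends on where `CsvSchema` is mounted.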
parent 39caf945fa
commit 7508ee1ed5

6 changed files with 32 additions and 9 deletions
@@ -118,6 +118,7 @@ describe('CsvGenerator', () => {
       useByteOrderMarkEncoding: false,
       scroll: { size: 500, duration: '30s' },
       enablePanelActionDownload: true,
+      maxConcurrentShardRequests: 5,
     };

     searchSourceMock.getField = jest.fn((key: string) => {
@@ -245,6 +246,7 @@ describe('CsvGenerator', () => {
       useByteOrderMarkEncoding: false,
       scroll: { size: 500, duration: '30s' },
       enablePanelActionDownload: true,
+      maxConcurrentShardRequests: 5,
     };

     mockDataClient.search = jest.fn().mockImplementation(() =>
@@ -345,7 +347,7 @@ describe('CsvGenerator', () => {

     expect(mockDataClient.search).toHaveBeenCalledTimes(10);
     expect(mockDataClient.search).toBeCalledWith(
-      { params: { body: {}, ignore_throttled: undefined } },
+      { params: { body: {}, ignore_throttled: undefined, max_concurrent_shard_requests: 5 } },
       { strategy: 'es', transport: { maxRetries: 0, requestTimeout: '30s' } }
     );

@@ -356,7 +358,7 @@ describe('CsvGenerator', () => {
         index: 'logstash-*',
         keep_alive: '30s',
       },
-      { maxRetries: 0, requestTimeout: '30s' }
+      { maxConcurrentShardRequests: 5, maxRetries: 0, requestTimeout: '30s' }
     );

     expect(mockEsClient.asCurrentUser.closePointInTime).toHaveBeenCalledTimes(1);
@@ -763,6 +765,7 @@ describe('CsvGenerator', () => {
       useByteOrderMarkEncoding: false,
       scroll: { size: 500, duration: '30s' },
       enablePanelActionDownload: true,
+      maxConcurrentShardRequests: 5,
     };
     mockDataClient.search = jest.fn().mockImplementation(() =>
       Rx.of({
@@ -833,7 +836,7 @@ describe('CsvGenerator', () => {
         index: 'logstash-*',
         keep_alive: '30s',
       },
-      { maxRetries: 0, requestTimeout: '30s' }
+      { maxConcurrentShardRequests: 5, maxRetries: 0, requestTimeout: '30s' }
     );

     expect(mockEsClient.asCurrentUser.openPointInTime).toHaveBeenCalledWith(
@@ -843,13 +846,14 @@ describe('CsvGenerator', () => {
         index: 'logstash-*',
         keep_alive: '30s',
       },
-      { maxRetries: 0, requestTimeout: '30s' }
+      { maxConcurrentShardRequests: 5, maxRetries: 0, requestTimeout: '30s' }
     );

     expect(mockDataClient.search).toBeCalledWith(
       {
         params: {
           body: {},
+          max_concurrent_shard_requests: 5,
         },
       },
       { strategy: 'es', transport: { maxRetries: 0, requestTimeout: '30s' } }
@@ -12,6 +12,7 @@ import type { Writable } from 'stream';
 import { errors as esErrors, estypes } from '@elastic/elasticsearch';
 import type { IScopedClusterClient, IUiSettingsClient, Logger } from '@kbn/core/server';
 import type {
+  IEsSearchRequest,
   IKibanaSearchResponse,
   ISearchSource,
   ISearchStartSearchSource,
@@ -66,7 +67,11 @@ export class CsvGenerator {
   ) {}

   private async openPointInTime(indexPatternTitle: string, settings: CsvExportSettings) {
-    const { duration } = settings.scroll;
+    const {
+      includeFrozen,
+      maxConcurrentShardRequests,
+      scroll: { duration },
+    } = settings;
     let pitId: string | undefined;
     this.logger.debug(`Requesting PIT for: [${indexPatternTitle}]...`);
     try {
@@ -77,11 +82,12 @@ export class CsvGenerator {
           keep_alive: duration,
           ignore_unavailable: true,
           // @ts-expect-error ignore_throttled is not in the type definition, but it is accepted by es
-          ignore_throttled: settings.includeFrozen ? false : undefined, // "true" will cause deprecation warnings logged in ES
+          ignore_throttled: includeFrozen ? false : undefined, // "true" will cause deprecation warnings logged in ES
         },
         {
           requestTimeout: duration,
           maxRetries: 0,
+          maxConcurrentShardRequests,
         }
       );
       pitId = response.id;
@@ -135,7 +141,7 @@ export class CsvGenerator {
     settings: CsvExportSettings,
     searchAfter?: estypes.SortResults
   ) {
-    const { scroll: scrollSettings } = settings;
+    const { scroll: scrollSettings, maxConcurrentShardRequests } = settings;
     searchSource.setField('size', scrollSettings.size);

     if (searchAfter) {
@@ -153,8 +159,14 @@ export class CsvGenerator {
       throw new Error('Could not retrieve the search body!');
     }

-    const searchParams = { params: { body: searchBody } };
-    let results: estypes.SearchResponse<unknown>;
+    const searchParams: IEsSearchRequest = {
+      params: {
+        body: searchBody,
+        max_concurrent_shard_requests: maxConcurrentShardRequests,
+      },
+    };
+
+    let results: estypes.SearchResponse<unknown> | undefined;
     try {
       const { rawResponse, ...rawDetails } = await lastValueFrom(
         this.clients.data.search(searchParams, {
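For context on what the new parameter does at the Elasticsearch level, the following is a hedged sketch using the `@elastic/elasticsearch` client directly (the node address and index name are assumptions for illustration, not part of this change):

```ts
import { Client } from '@elastic/elasticsearch';

// Hypothetical cluster address for illustration.
const es = new Client({ node: 'http://localhost:9200' });

async function searchWithShardLimit() {
  // max_concurrent_shard_requests caps how many shard-level requests this
  // search runs concurrently per node; Elasticsearch's own default is 5.
  return es.search({
    index: 'logstash-*',
    max_concurrent_shard_requests: 5,
    query: { match_all: {} },
  });
}

searchWithShardLimit().then((resp) => console.log(resp.hits.total));
```

Adjusting the value trades per-search shard concurrency against load on the cluster, which is the kind of tuning the new setting is meant to allow.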
@@ -30,6 +30,7 @@ describe('getExportSettings', () => {
     maxSizeBytes: 180000,
     scroll: { size: 500, duration: '30s' },
     useByteOrderMarkEncoding: false,
+    maxConcurrentShardRequests: 5,
     enablePanelActionDownload: true,
   };
   const logger = loggingSystemMock.createLogger();
@@ -62,6 +63,7 @@ describe('getExportSettings', () => {
       "escapeFormulaValues": false,
       "escapeValue": [Function],
       "includeFrozen": false,
+      "maxConcurrentShardRequests": 5,
       "maxSizeBytes": 180000,
       "scroll": Object {
         "duration": "30s",
@@ -31,6 +31,7 @@ export interface CsvExportSettings {
   escapeFormulaValues: boolean;
   escapeValue: (value: string) => string;
   includeFrozen: boolean;
+  maxConcurrentShardRequests: number;
 }

 export const getExportSettings = async (
@@ -82,5 +83,6 @@ export const getExportSettings = async (
     checkForFormulas: config.checkForFormulas,
     escapeFormulaValues,
     escapeValue,
+    maxConcurrentShardRequests: config.maxConcurrentShardRequests,
   };
 };
@@ -9,6 +9,7 @@ Object {
   "checkForFormulas": true,
   "enablePanelActionDownload": true,
   "escapeFormulaValues": false,
+  "maxConcurrentShardRequests": 5,
   "maxSizeBytes": ByteSizeValue {
     "valueInBytes": 262144000,
   },
@@ -70,6 +71,7 @@ Object {
   "checkForFormulas": true,
   "enablePanelActionDownload": true,
   "escapeFormulaValues": false,
+  "maxConcurrentShardRequests": 5,
   "maxSizeBytes": ByteSizeValue {
     "valueInBytes": 262144000,
   },
@@ -75,6 +75,7 @@ const CsvSchema = schema.object({
     }),
     size: schema.number({ defaultValue: 500 }),
   }),
+  maxConcurrentShardRequests: schema.number({ defaultValue: 5 }),
 });

 const EncryptionKeySchema = schema.conditional(