mirror of
https://github.com/elastic/kibana.git
synced 2025-04-24 01:38:56 -04:00
* Fix date formatting on server for CSV export * remove stray console.log * allow async to act in parallel * Log a warning when "Browser" is the timezone
This commit is contained in:
parent
74f4e6a509
commit
096dec4ce8
6 changed files with 148 additions and 43 deletions
|
@ -0,0 +1,82 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch B.V. under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch B.V. licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
import { memoize } from 'lodash';
|
||||
import moment from 'moment-timezone';
|
||||
|
||||
/**
 * Builds a server-side `date` field formatter class.
 *
 * On the server, importing moment returns a fresh instance that does NOT
 * have the `dateFormat:tz` configuration baked in (unlike the client), so
 * the timezone must be applied manually on every conversion.
 *
 * @param {Function} FieldFormat - base field-format class to extend.
 * @returns {Function} a DateFormat class registered under id `date`.
 */
export function createDateOnServerFormat(FieldFormat) {
  return class DateFormat extends FieldFormat {
    constructor(params, getConfig) {
      super(params);

      // Used by getParamDefaults() to read `dateFormat` / `dateFormat:tz`.
      this.getConfig = getConfig;

      // Memoize per input value: the same raw value is formatted once.
      // The cache is cleared in _convert() whenever the pattern or
      // timezone setting changes, so entries never go stale.
      this._memoizedConverter = memoize(val => {
        if (val == null) {
          return '-';
        }

        /* On the server, importing moment returns a new instance. Unlike on
         * the client side, it doesn't have the dateFormat:tz configuration
         * baked in.
         * We need to set the timezone manually here. The date is taken in as
         * UTC and converted into the desired timezone. */
        let date;
        if (this._timeZone === 'Browser') {
          // "Browser" is meaningless on the server; fall back to UTC. A
          // warning is assumed to have been logged upstream — logging here
          // (once per value) would be too verbose.
          date = moment.utc(val);
        } else {
          date = moment.utc(val).tz(this._timeZone);
        }

        if (date.isValid()) {
          return date.format(this._memoizedPattern);
        } else {
          // Not parseable as a date: pass the raw value through unchanged.
          return val;
        }
      });
    }

    getParamDefaults() {
      return {
        pattern: this.getConfig('dateFormat'),
        timezone: this.getConfig('dateFormat:tz'),
      };
    }

    /**
     * Converts a raw value into a formatted date string using the current
     * `pattern` / `timezone` params, hot-swapping when config changes.
     *
     * @param {*} val - raw field value (epoch millis, ISO string, …).
     * @returns {string|*} formatted date, '-' for null/undefined, or the
     *   raw value when it is not a valid date.
     */
    _convert(val) {
      // don't give away our ref to converter so we can hot-swap when config changes
      const pattern = this.param('pattern');
      const timezone = this.param('timezone');

      const timezoneChanged = this._timeZone !== timezone;
      const datePatternChanged = this._memoizedPattern !== pattern;
      if (timezoneChanged || datePatternChanged) {
        this._timeZone = timezone;
        this._memoizedPattern = pattern;
        // BUGFIX: lodash memoize caches by input value only, so results
        // computed under the previous pattern/timezone would otherwise be
        // served stale after a config change. Drop the cache so every
        // value is re-formatted with the new settings.
        this._memoizedConverter.cache.clear();
      }

      return this._memoizedConverter(val);
    }

    static id = 'date';
    static title = 'Date';
    static fieldType = 'date';
  };
}
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
import { createUrlFormat } from '../../common/field_formats/types/url';
|
||||
import { createBytesFormat } from '../../common/field_formats/types/bytes';
|
||||
import { createDateFormat } from '../../common/field_formats/types/date';
|
||||
import { createDateOnServerFormat } from '../../common/field_formats/types/date_server';
|
||||
import { createDurationFormat } from '../../common/field_formats/types/duration';
|
||||
import { createIpFormat } from '../../common/field_formats/types/ip';
|
||||
import { createNumberFormat } from '../../common/field_formats/types/number';
|
||||
|
@ -34,7 +34,7 @@ import { createStaticLookupFormat } from '../../common/field_formats/types/stati
|
|||
export function registerFieldFormats(server) {
|
||||
server.registerFieldFormat(createUrlFormat);
|
||||
server.registerFieldFormat(createBytesFormat);
|
||||
server.registerFieldFormat(createDateFormat);
|
||||
server.registerFieldFormat(createDateOnServerFormat);
|
||||
server.registerFieldFormat(createDurationFormat);
|
||||
server.registerFieldFormat(createIpFormat);
|
||||
server.registerFieldFormat(createNumberFormat);
|
||||
|
|
|
@ -15,7 +15,10 @@ function executeJobFn(server) {
|
|||
const { callWithRequest } = server.plugins.elasticsearch.getCluster('data');
|
||||
const crypto = cryptoFactory(server);
|
||||
const config = server.config();
|
||||
const logger = createTaggedLogger(server, ['reporting', 'csv', 'debug']);
|
||||
const logger = {
|
||||
debug: createTaggedLogger(server, ['reporting', 'csv', 'debug']),
|
||||
warn: createTaggedLogger(server, ['reporting', 'csv', 'warning']),
|
||||
};
|
||||
const generateCsv = createGenerateCsv(logger);
|
||||
const serverBasePath = config.get('server.basePath');
|
||||
|
||||
|
@ -27,17 +30,23 @@ function executeJobFn(server) {
|
|||
metaFields,
|
||||
conflictedTypesFields,
|
||||
headers: serializedEncryptedHeaders,
|
||||
basePath
|
||||
basePath,
|
||||
} = job;
|
||||
|
||||
let decryptedHeaders;
|
||||
try {
|
||||
decryptedHeaders = await crypto.decrypt(serializedEncryptedHeaders);
|
||||
} catch (e) {
|
||||
throw new Error(i18n.translate('xpack.reporting.exportTypes.csv.executeJob.failedToDecryptReportJobDataErrorMessage', {
|
||||
defaultMessage: 'Failed to decrypt report job data. Please ensure that {encryptionKey} is set and re-generate this report.',
|
||||
values: { encryptionKey: 'xpack.reporting.encryptionKey' }
|
||||
}));
|
||||
throw new Error(
|
||||
i18n.translate(
|
||||
'xpack.reporting.exportTypes.csv.executeJob.failedToDecryptReportJobDataErrorMessage',
|
||||
{
|
||||
defaultMessage:
|
||||
'Failed to decrypt report job data. Please ensure that {encryptionKey} is set and re-generate this report.',
|
||||
values: { encryptionKey: 'xpack.reporting.encryptionKey' },
|
||||
}
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
const fakeRequest = {
|
||||
|
@ -53,32 +62,47 @@ function executeJobFn(server) {
|
|||
};
|
||||
const savedObjects = server.savedObjects;
|
||||
const savedObjectsClient = savedObjects.getScopedSavedObjectsClient(fakeRequest);
|
||||
const uiSettings = server.uiSettingsServiceFactory({
|
||||
savedObjectsClient
|
||||
const uiConfig = server.uiSettingsServiceFactory({
|
||||
savedObjectsClient,
|
||||
});
|
||||
|
||||
const fieldFormats = await server.fieldFormatServiceFactory(uiSettings);
|
||||
const formatsMap = fieldFormatMapFactory(indexPatternSavedObject, fieldFormats);
|
||||
const [formatsMap, uiSettings] = await Promise.all([
|
||||
(async () => {
|
||||
const fieldFormats = await server.fieldFormatServiceFactory(uiConfig);
|
||||
return fieldFormatMapFactory(indexPatternSavedObject, fieldFormats);
|
||||
})(),
|
||||
(async () => {
|
||||
const [separator, quoteValues, timezone] = await Promise.all([
|
||||
uiConfig.get('csv:separator'),
|
||||
uiConfig.get('csv:quoteValues'),
|
||||
uiConfig.get('dateFormat:tz'),
|
||||
]);
|
||||
|
||||
const separator = await uiSettings.get('csv:separator');
|
||||
const quoteValues = await uiSettings.get('csv:quoteValues');
|
||||
const maxSizeBytes = config.get('xpack.reporting.csv.maxSizeBytes');
|
||||
const scroll = config.get('xpack.reporting.csv.scroll');
|
||||
if (timezone === 'Browser') {
|
||||
logger.warn(`Kibana Advanced Setting "dateFormat:tz" is set to "Browser". Dates will be formatted as UTC to avoid ambiguity.`);
|
||||
}
|
||||
|
||||
return {
|
||||
separator,
|
||||
quoteValues,
|
||||
timezone,
|
||||
};
|
||||
})(),
|
||||
]);
|
||||
|
||||
const { content, maxSizeReached, size } = await generateCsv({
|
||||
searchRequest,
|
||||
fields,
|
||||
formatsMap,
|
||||
metaFields,
|
||||
conflictedTypesFields,
|
||||
callEndpoint,
|
||||
cancellationToken,
|
||||
formatsMap,
|
||||
settings: {
|
||||
separator,
|
||||
quoteValues,
|
||||
maxSizeBytes,
|
||||
scroll
|
||||
}
|
||||
...uiSettings,
|
||||
maxSizeBytes: config.get('xpack.reporting.csv.maxSizeBytes'),
|
||||
scroll: config.get('xpack.reporting.csv.scroll'),
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
|
|
|
@ -8,24 +8,23 @@ import { isObject, isNull, isUndefined } from 'lodash';
|
|||
|
||||
export function createFormatCsvValues(escapeValue, separator, fields, formatsMap) {
|
||||
return function formatCsvValues(values) {
|
||||
return fields.map((field) => {
|
||||
let value = values[field];
|
||||
return fields
|
||||
.map(field => {
|
||||
const value = values[field];
|
||||
if (isNull(value) || isUndefined(value)) {
|
||||
return '';
|
||||
}
|
||||
|
||||
if (isNull(value) || isUndefined(value)) {
|
||||
return '';
|
||||
}
|
||||
let formattedValue = value;
|
||||
if (formatsMap.has(field)) {
|
||||
const formatter = formatsMap.get(field);
|
||||
formattedValue = formatter.convert(value);
|
||||
}
|
||||
|
||||
if (formatsMap.has(field)) {
|
||||
const formatter = formatsMap.get(field);
|
||||
value = formatter.convert(value);
|
||||
}
|
||||
|
||||
if (isObject(value)) {
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
|
||||
return value.toString();
|
||||
})
|
||||
return formattedValue;
|
||||
})
|
||||
.map(value => (isObject(value) ? JSON.stringify(value) : value))
|
||||
.map(value => value.toString())
|
||||
.map(escapeValue)
|
||||
.join(separator);
|
||||
};
|
||||
|
|
|
@ -49,7 +49,7 @@ export function createGenerateCsv(logger) {
|
|||
}
|
||||
|
||||
if (!builder.tryAppend(formatCsvValues(flattenHit(hit)) + '\n')) {
|
||||
logger('max Size Reached');
|
||||
logger.warn('max Size Reached');
|
||||
maxSizeReached = true;
|
||||
cancellationToken.cancel();
|
||||
break;
|
||||
|
@ -59,7 +59,7 @@ export function createGenerateCsv(logger) {
|
|||
await iterator.return();
|
||||
}
|
||||
const size = builder.getSizeInBytes();
|
||||
logger(`finished generating, total size in bytes: ${size}`);
|
||||
logger.debug(`finished generating, total size in bytes: ${size}`);
|
||||
|
||||
return {
|
||||
content: builder.getString(),
|
||||
|
|
|
@ -30,7 +30,7 @@ async function parseResponse(request) {
|
|||
|
||||
export function createHitIterator(logger) {
|
||||
return async function* hitIterator(scrollSettings, callEndpoint, searchRequest, cancellationToken) {
|
||||
logger('executing search request');
|
||||
logger.debug('executing search request');
|
||||
function search(index, body) {
|
||||
return parseResponse(callEndpoint('search', {
|
||||
index,
|
||||
|
@ -41,7 +41,7 @@ export function createHitIterator(logger) {
|
|||
}
|
||||
|
||||
function scroll(scrollId) {
|
||||
logger('executing scroll request');
|
||||
logger.debug('executing scroll request');
|
||||
return parseResponse(callEndpoint('scroll', {
|
||||
scrollId,
|
||||
scroll: scrollSettings.duration
|
||||
|
@ -49,7 +49,7 @@ export function createHitIterator(logger) {
|
|||
}
|
||||
|
||||
function clearScroll(scrollId) {
|
||||
logger('executing clearScroll request');
|
||||
logger.debug('executing clearScroll request');
|
||||
return callEndpoint('clearScroll', {
|
||||
scrollId: [ scrollId ]
|
||||
});
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue