mirror of
https://github.com/elastic/kibana.git
synced 2025-04-23 09:19:04 -04:00
[Lens] Make default dimension labels auto translate (#159089)
## Summary Fixes #147107 This PR fixes two issues with locale and default dimension labels: * generate default (non-custom) dimension labels at expression runtime rather than at dimension generation * make date histogram interval unit support locale Test notes: * create a dashboard with some panels with default labels, others with custom ones * then change the i18n configuration in the `kibana.yml` file and reload the dashboard * default labels + intervals should be now translated * custom labels should not change Here's a dashboard using all 4 editors with a date histogram charts. This PR is expected to have full effect on Lens and some effects on Agg-based visualization, no effects on Vega and TSVB: <img width="1508" alt="Screenshot 2023-06-06 at 10 21 03" src="812c7176
-5a81-44bc-8619-7436a24fe608"> <img width="1507" alt="Screenshot 2023-06-06 at 10 19 01" src="34b8957d
-eb07-48a6-8049-88b9e5e165e4"> <img width="1508" alt="Screenshot 2023-06-06 at 10 17 59" src="fde657fb
-c9f7-4860-bc92-d4a3806fc16e"> <img width="1506" alt="Screenshot 2023-06-06 at 10 14 39" src="93c26f31
-e98d-4985-a179-2c86e1ddb875"> ## Note <details> <summary>Previous issue with single unit</summary> There's a breaking change which affects the unit rate when it's a single value unit: <img width="1505" alt="Screenshot 2023-06-06 at 10 23 20" src="52eceaa5
-78aa-4903-96be-8f33fd0360e9"> <img width="1509" alt="Screenshot 2023-06-06 at 10 22 34" src="f8625ec7
-69c8-4b5d-ac87-50b9e56e282f"> The change is `timestamp per hour` to `timestamp per one hour`. ❌ It would be possible to handle a few of these cases with manual parsing of the string for some languages (i.e. `durationDescription.split(' ')[1]`), but that is not a universal fix as it won't work for languages without spaces. TSVB largely "solves" this problem by stepping down the unit to avoid using a single-value unit: `1 hour` => `60 minutes`. Perhaps a similar solution could be leveraged as well? It would still be some sort of major change. </details> The previous issue with single unit has been solved and will be completely visible once the i18n is applied. ### Checklist Delete any items that are not applicable to this PR. - [ ] Any text added follows [EUI's writing guidelines](https://elastic.github.io/eui/#/guidelines/writing), uses sentence case text and includes [i18n support](https://github.com/elastic/kibana/blob/main/packages/kbn-i18n/README.md) - [ ] [Documentation](https://www.elastic.co/guide/en/kibana/master/development-documentation.html) was added for features that require explanation or tutorials - [ ] [Unit or functional tests](https://www.elastic.co/guide/en/kibana/master/development-tests.html) were updated or added to match the most common scenarios - [ ] Any UI touched in this PR is usable by keyboard only (learn more about [keyboard accessibility](https://webaim.org/techniques/keyboard/)) - [ ] Any UI touched in this PR does not create any new axe failures (run axe in browser: [FF](https://addons.mozilla.org/en-US/firefox/addon/axe-devtools/), [Chrome](https://chrome.google.com/webstore/detail/axe-web-accessibility-tes/lhdoppojpmngadmnindnejefpokejbdd?hl=en-US)) - [ ] If a plugin configuration key changed, check if it needs to be allowlisted in the cloud and added to the [docker list](https://github.com/elastic/kibana/blob/main/src/dev/build/tasks/os_packages/docker_generator/resources/base/bin/kibana-docker) - [ ] This renders 
correctly on smaller devices using a responsive layout. (You can test this [in your browser](https://www.browserstack.com/guide/responsive-testing-on-local-server)) - [ ] This was checked for [cross-browser compatibility](https://www.elastic.co/support/matrix#matrix_browsers) ### Risk Matrix Delete this section if it is not applicable to this PR. Before closing this PR, invite QA, stakeholders, and other developers to identify risks that should be tested prior to the change/feature release. When forming the risk matrix, consider some of the following examples and how they may potentially impact the change: | Risk | Probability | Severity | Mitigation/Notes | |---------------------------|-------------|----------|-------------------------| | Multiple Spaces—unexpected behavior in non-default Kibana Space. | Low | High | Integration tests will verify that all features are still supported in non-default Kibana Space and when user switches between spaces. | | Multiple nodes—Elasticsearch polling might have race conditions when multiple Kibana nodes are polling for the same tasks. | High | Low | Tasks are idempotent, so executing them multiple times will not result in logical error, but will degrade performance. To test for this case we add plenty of unit tests around this logic and document manual testing procedure. | | Code should gracefully handle cases when feature X or plugin Y are disabled. | Medium | High | Unit tests will verify that any feature flag or plugin combination still results in our service operational. | | [See more potential risk examples](https://github.com/elastic/kibana/blob/main/RISK_MATRIX.mdx) | ### For maintainers - [ ] This was checked for breaking API changes and was [labeled appropriately](https://www.elastic.co/guide/en/kibana/master/contributing.html#kibana-release-notes-process) --------- Co-authored-by: Stratoula Kalafateli <efstratia.kalafateli@elastic.co>
This commit is contained in:
parent
e56fece392
commit
4265ad2bea
18 changed files with 248 additions and 97 deletions
|
@ -9,6 +9,7 @@
|
|||
import moment from 'moment';
|
||||
import dateMath, { Unit } from '@kbn/datemath';
|
||||
|
||||
import { i18n } from '@kbn/i18n';
|
||||
import { parseEsInterval } from '../../../utils';
|
||||
|
||||
const unitsDesc = dateMath.unitsDesc;
|
||||
|
@ -71,3 +72,90 @@ export function convertIntervalToEsInterval(interval: string): EsInterval {
|
|||
expression: interval,
|
||||
};
|
||||
}
|
||||
|
||||
declare module 'moment' {
|
||||
interface Locale {
|
||||
_config: moment.LocaleSpecification;
|
||||
}
|
||||
}
|
||||
|
||||
// Below 5 seconds the "humanize" call returns the "few seconds" sentence, which is not ok for ms
|
||||
// This special config rewrite makes it sure to have precision also for sub-seconds durations
|
||||
// ref: https://github.com/moment/moment/issues/348
|
||||
export function getPreciseDurationDescription(
|
||||
intervalValue: number,
|
||||
unit: moment.unitOfTime.Base
|
||||
): string {
|
||||
// moment cannot format anything below seconds, so this requires a manual handling
|
||||
if (unit === 'millisecond') {
|
||||
return intervalValue === 1
|
||||
? i18n.translate('data.search.aggs.buckets.intervalOptions.millisecond', {
|
||||
defaultMessage: 'millisecond',
|
||||
})
|
||||
: i18n.translate('data.search.aggs.buckets.intervalOptions.milliseconds', {
|
||||
defaultMessage: '{n} milliseconds',
|
||||
values: { n: intervalValue },
|
||||
});
|
||||
}
|
||||
// Save default values
|
||||
const roundingDefault = moment.relativeTimeRounding();
|
||||
const units = [
|
||||
{ unit: 'm', value: 60 }, // This should prevent to round up 45 minutes to "an hour"
|
||||
{ unit: 's', value: 60 }, // this should prevent to round up 45 seconds to "a minute"
|
||||
{ unit: 'ss', value: 0 }, // This should prevent to round anything below 5 seconds to "few seconds"
|
||||
{ unit: 'ms', value: 1000 }, // this should render precision at milliseconds level
|
||||
];
|
||||
const defaultValues = units.map(({ unit: u }) => moment.relativeTimeThreshold(u) as number);
|
||||
|
||||
const DIGITS = 2;
|
||||
const powValue = Math.pow(10, DIGITS);
|
||||
moment.relativeTimeRounding((t) => {
|
||||
return Math.round(t * powValue) / powValue;
|
||||
});
|
||||
units.forEach(({ unit: u, value }) => moment.relativeTimeThreshold(u, value));
|
||||
|
||||
const defaultLocaleConfig = moment.localeData()._config;
|
||||
moment.updateLocale(moment.locale(), {
|
||||
relativeTime: {
|
||||
ss: (n: number): string => {
|
||||
return n === 1
|
||||
? i18n.translate('data.search.aggs.buckets.intervalOptions.second', {
|
||||
defaultMessage: 'second',
|
||||
})
|
||||
: i18n.translate('data.search.aggs.buckets.intervalOptions.seconds', {
|
||||
defaultMessage: '{n} seconds',
|
||||
values: { n },
|
||||
});
|
||||
},
|
||||
m: i18n.translate('data.search.aggs.buckets.intervalOptions.minute', {
|
||||
defaultMessage: 'minute',
|
||||
}),
|
||||
h: i18n.translate('data.search.aggs.buckets.intervalOptions.hourly', {
|
||||
defaultMessage: 'hour',
|
||||
}),
|
||||
d: i18n.translate('data.search.aggs.buckets.intervalOptions.daily', {
|
||||
defaultMessage: 'day',
|
||||
}),
|
||||
w: i18n.translate('data.search.aggs.buckets.intervalOptions.weekly', {
|
||||
defaultMessage: 'week',
|
||||
}),
|
||||
M: i18n.translate('data.search.aggs.buckets.intervalOptions.monthly', {
|
||||
defaultMessage: 'month',
|
||||
}),
|
||||
y: i18n.translate('data.search.aggs.buckets.intervalOptions.yearly', {
|
||||
defaultMessage: 'year',
|
||||
}),
|
||||
},
|
||||
});
|
||||
|
||||
// Execute the format/humanize call in the callback
|
||||
const result = moment.duration(intervalValue, unit).locale(i18n.getLocale()).humanize();
|
||||
|
||||
// restore all the default values now in moment to not break it
|
||||
units.forEach(({ unit: u }, i) => moment.relativeTimeThreshold(unit, defaultValues[i]));
|
||||
moment.relativeTimeRounding(roundingDefault);
|
||||
|
||||
// restore all the default values now in moment to not break it
|
||||
moment.updateLocale(moment.locale(), defaultLocaleConfig);
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ import {
|
|||
convertDurationToNormalizedEsInterval,
|
||||
convertIntervalToEsInterval,
|
||||
EsInterval,
|
||||
getPreciseDurationDescription,
|
||||
} from './calc_es_interval';
|
||||
import { autoInterval } from '../../_interval_options';
|
||||
|
||||
|
@ -272,22 +273,19 @@ export class TimeBuckets {
|
|||
? convertDurationToNormalizedEsInterval(interval, originalUnit)
|
||||
: convertIntervalToEsInterval(this._originalInterval);
|
||||
|
||||
const prettyUnits = moment.normalizeUnits(esInterval.unit);
|
||||
const prettyUnits = moment.normalizeUnits(esInterval.unit) as moment.unitOfTime.Base;
|
||||
const durationDescription = getPreciseDurationDescription(esInterval.value, prettyUnits);
|
||||
|
||||
return Object.assign(interval, {
|
||||
description:
|
||||
esInterval.value === 1 ? prettyUnits : esInterval.value + ' ' + prettyUnits + 's',
|
||||
description: durationDescription,
|
||||
esValue: esInterval.value,
|
||||
esUnit: esInterval.unit,
|
||||
expression: esInterval.expression,
|
||||
});
|
||||
};
|
||||
|
||||
if (useNormalizedEsInterval) {
|
||||
return decorateInterval(maybeScaleInterval(parsedInterval));
|
||||
} else {
|
||||
return decorateInterval(parsedInterval);
|
||||
}
|
||||
return decorateInterval(
|
||||
useNormalizedEsInterval ? maybeScaleInterval(parsedInterval) : parsedInterval
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -247,32 +247,36 @@ describe('IndexPattern Data Source', () => {
|
|||
dataType: 'number',
|
||||
isBucketed: false,
|
||||
label: 'Foo',
|
||||
customLabel: true,
|
||||
operationType: 'count',
|
||||
sourceField: '___records___',
|
||||
};
|
||||
const map = FormBasedDatasource.uniqueLabels({
|
||||
layers: {
|
||||
a: {
|
||||
columnOrder: ['a', 'b'],
|
||||
columns: {
|
||||
a: col,
|
||||
b: col,
|
||||
},
|
||||
indexPatternId: 'foo',
|
||||
},
|
||||
b: {
|
||||
columnOrder: ['c', 'd'],
|
||||
columns: {
|
||||
c: col,
|
||||
d: {
|
||||
...col,
|
||||
label: 'Foo [1]',
|
||||
const map = FormBasedDatasource.uniqueLabels(
|
||||
{
|
||||
layers: {
|
||||
a: {
|
||||
columnOrder: ['a', 'b'],
|
||||
columns: {
|
||||
a: col,
|
||||
b: col,
|
||||
},
|
||||
indexPatternId: 'foo',
|
||||
},
|
||||
b: {
|
||||
columnOrder: ['c', 'd'],
|
||||
columns: {
|
||||
c: col,
|
||||
d: {
|
||||
...col,
|
||||
label: 'Foo [1]',
|
||||
},
|
||||
},
|
||||
indexPatternId: 'foo',
|
||||
},
|
||||
indexPatternId: 'foo',
|
||||
},
|
||||
},
|
||||
} as unknown as FormBasedPrivateState);
|
||||
} as unknown as FormBasedPrivateState,
|
||||
indexPatterns
|
||||
);
|
||||
|
||||
expect(map).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
|
@ -583,7 +587,7 @@ describe('IndexPattern Data Source', () => {
|
|||
Object {
|
||||
"arguments": Object {
|
||||
"idMap": Array [
|
||||
"{\\"col-0-0\\":[{\\"label\\":\\"Count of records\\",\\"dataType\\":\\"number\\",\\"isBucketed\\":false,\\"sourceField\\":\\"___records___\\",\\"operationType\\":\\"count\\",\\"id\\":\\"col1\\"}],\\"col-1-1\\":[{\\"label\\":\\"Date\\",\\"dataType\\":\\"date\\",\\"isBucketed\\":true,\\"operationType\\":\\"date_histogram\\",\\"sourceField\\":\\"timestamp\\",\\"params\\":{\\"interval\\":\\"1d\\"},\\"id\\":\\"col2\\"}]}",
|
||||
"{\\"col-0-0\\":[{\\"label\\":\\"Count of records\\",\\"dataType\\":\\"number\\",\\"isBucketed\\":false,\\"sourceField\\":\\"___records___\\",\\"operationType\\":\\"count\\",\\"id\\":\\"col1\\"}],\\"col-1-1\\":[{\\"label\\":\\"timestampLabel\\",\\"dataType\\":\\"date\\",\\"isBucketed\\":true,\\"operationType\\":\\"date_histogram\\",\\"sourceField\\":\\"timestamp\\",\\"params\\":{\\"interval\\":\\"1d\\"},\\"id\\":\\"col2\\"}]}",
|
||||
],
|
||||
},
|
||||
"function": "lens_map_to_columns",
|
||||
|
@ -1127,7 +1131,7 @@ describe('IndexPattern Data Source', () => {
|
|||
"col1",
|
||||
],
|
||||
"outputColumnName": Array [
|
||||
"Count of records",
|
||||
"Count of records per hour",
|
||||
],
|
||||
"reducedTimeRange": Array [],
|
||||
"targetUnit": Array [
|
||||
|
@ -1570,7 +1574,7 @@ describe('IndexPattern Data Source', () => {
|
|||
"dataType": "string",
|
||||
"id": "col1",
|
||||
"isBucketed": true,
|
||||
"label": "My Op",
|
||||
"label": "Top 5 values of Missing field",
|
||||
"operationType": "terms",
|
||||
"params": Object {
|
||||
"orderBy": Object {
|
||||
|
@ -1588,7 +1592,7 @@ describe('IndexPattern Data Source', () => {
|
|||
"dataType": "number",
|
||||
"id": "col2",
|
||||
"isBucketed": false,
|
||||
"label": "Count of records",
|
||||
"label": "Count of records per hour",
|
||||
"operationType": "count",
|
||||
"sourceField": "___records___",
|
||||
"timeScale": "h",
|
||||
|
@ -1597,7 +1601,7 @@ describe('IndexPattern Data Source', () => {
|
|||
"dataType": "number",
|
||||
"id": "col3",
|
||||
"isBucketed": false,
|
||||
"label": "Count of records",
|
||||
"label": "Count of records per hour",
|
||||
"operationType": "count",
|
||||
"sourceField": "___records___",
|
||||
"timeScale": "h",
|
||||
|
@ -1606,7 +1610,7 @@ describe('IndexPattern Data Source', () => {
|
|||
"dataType": "number",
|
||||
"id": "col4",
|
||||
"isBucketed": false,
|
||||
"label": "Count of records",
|
||||
"label": "Count of records per hour",
|
||||
"operationType": "count",
|
||||
"sourceField": "___records___",
|
||||
"timeScale": "h",
|
||||
|
@ -2154,12 +2158,15 @@ describe('IndexPattern Data Source', () => {
|
|||
describe('getOperationForColumnId', () => {
|
||||
it('should get an operation for col1', () => {
|
||||
expect(publicAPI.getOperationForColumnId('col1')).toEqual({
|
||||
label: 'My Op',
|
||||
label: 'Top 5 values of Missing field',
|
||||
dataType: 'string',
|
||||
isBucketed: true,
|
||||
isStaticValue: false,
|
||||
hasTimeShift: false,
|
||||
hasReducedTimeRange: false,
|
||||
scale: undefined,
|
||||
sortingHint: undefined,
|
||||
interval: undefined,
|
||||
} as OperationDescriptor);
|
||||
});
|
||||
|
||||
|
|
|
@ -42,6 +42,7 @@ import type {
|
|||
UserMessage,
|
||||
FrameDatasourceAPI,
|
||||
StateSetter,
|
||||
IndexPatternMap,
|
||||
} from '../../types';
|
||||
import {
|
||||
changeIndexPattern,
|
||||
|
@ -498,7 +499,7 @@ export function getFormBasedDatasource({
|
|||
);
|
||||
},
|
||||
|
||||
uniqueLabels(state: FormBasedPrivateState) {
|
||||
uniqueLabels(state: FormBasedPrivateState, indexPatternsMap: IndexPatternMap) {
|
||||
const layers = state.layers;
|
||||
const columnLabelMap = {} as Record<string, string>;
|
||||
|
||||
|
@ -509,7 +510,15 @@ export function getFormBasedDatasource({
|
|||
return;
|
||||
}
|
||||
Object.entries(layer.columns).forEach(([columnId, column]) => {
|
||||
columnLabelMap[columnId] = uniqueLabelGenerator(column.label);
|
||||
columnLabelMap[columnId] = uniqueLabelGenerator(
|
||||
column.customLabel
|
||||
? column.label
|
||||
: operationDefinitionMap[column.operationType].getDefaultLabel(
|
||||
column,
|
||||
indexPatternsMap[layer.indexPatternId],
|
||||
layer.columns
|
||||
)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
|
@ -520,7 +529,7 @@ export function getFormBasedDatasource({
|
|||
domElement: Element,
|
||||
props: DatasourceDimensionTriggerProps<FormBasedPrivateState>
|
||||
) => {
|
||||
const columnLabelMap = formBasedDatasource.uniqueLabels(props.state);
|
||||
const columnLabelMap = formBasedDatasource.uniqueLabels(props.state, props.indexPatterns);
|
||||
const uniqueLabel = columnLabelMap[props.columnId];
|
||||
const formattedLabel = wrapOnDot(uniqueLabel);
|
||||
|
||||
|
@ -552,7 +561,7 @@ export function getFormBasedDatasource({
|
|||
domElement: Element,
|
||||
props: DatasourceDimensionEditorProps<FormBasedPrivateState>
|
||||
) => {
|
||||
const columnLabelMap = formBasedDatasource.uniqueLabels(props.state);
|
||||
const columnLabelMap = formBasedDatasource.uniqueLabels(props.state, props.indexPatterns);
|
||||
|
||||
render(
|
||||
<KibanaThemeProvider theme$={core.theme.theme$}>
|
||||
|
@ -746,7 +755,7 @@ export function getFormBasedDatasource({
|
|||
},
|
||||
|
||||
getPublicAPI({ state, layerId, indexPatterns }: PublicAPIProps<FormBasedPrivateState>) {
|
||||
const columnLabelMap = formBasedDatasource.uniqueLabels(state);
|
||||
const columnLabelMap = formBasedDatasource.uniqueLabels(state, indexPatterns);
|
||||
const layer = state.layers[layerId];
|
||||
const visibleColumnIds = layer.columnOrder.filter((colId) => !isReferenced(layer, colId));
|
||||
|
||||
|
|
|
@ -118,7 +118,7 @@ export const countOperation: OperationDefinition<CountIndexPatternColumn, 'field
|
|||
}
|
||||
},
|
||||
getDefaultLabel: (column, indexPattern) => {
|
||||
const field = indexPattern.getFieldByName(column.sourceField);
|
||||
const field = indexPattern?.getFieldByName(column.sourceField);
|
||||
return ofName(field, column.timeShift, column.timeScale, column.reducedTimeRange);
|
||||
},
|
||||
buildColumn({ field, previousColumn }, columnParams) {
|
||||
|
|
|
@ -129,8 +129,8 @@ export function combineErrorMessages(
|
|||
return messages.length ? messages : undefined;
|
||||
}
|
||||
|
||||
export function getSafeName(name: string, indexPattern: IndexPattern): string {
|
||||
const field = indexPattern.getFieldByName(name);
|
||||
export function getSafeName(name: string, indexPattern: IndexPattern | undefined): string {
|
||||
const field = indexPattern?.getFieldByName(name);
|
||||
return field
|
||||
? field.displayName
|
||||
: i18n.translate('xpack.lens.indexPattern.missingFieldLabel', {
|
||||
|
|
|
@ -228,6 +228,13 @@ function getExpressionForLayer(
|
|||
{
|
||||
...col,
|
||||
id: colId,
|
||||
label: col.customLabel
|
||||
? col.label
|
||||
: operationDefinitionMap[col.operationType].getDefaultLabel(
|
||||
col,
|
||||
indexPattern,
|
||||
layer.columns
|
||||
),
|
||||
},
|
||||
];
|
||||
|
||||
|
@ -368,7 +375,15 @@ function getExpressionForLayer(
|
|||
dateColumnId: firstDateHistogramColumn?.length ? [firstDateHistogramColumn[0]] : [],
|
||||
inputColumnId: [id],
|
||||
outputColumnId: [id],
|
||||
outputColumnName: [col.label],
|
||||
outputColumnName: [
|
||||
col.customLabel
|
||||
? col.label
|
||||
: operationDefinitionMap[col.operationType].getDefaultLabel(
|
||||
col,
|
||||
indexPattern,
|
||||
layer.columns
|
||||
),
|
||||
],
|
||||
targetUnit: [col.timeScale!],
|
||||
reducedTimeRange: col.reducedTimeRange ? [col.reducedTimeRange] : [],
|
||||
},
|
||||
|
|
|
@ -133,29 +133,32 @@ describe('Textbased Data Source', () => {
|
|||
|
||||
describe('uniqueLabels', () => {
|
||||
it('appends a suffix to duplicates', () => {
|
||||
const map = TextBasedDatasource.uniqueLabels({
|
||||
layers: {
|
||||
a: {
|
||||
columns: [
|
||||
{
|
||||
columnId: 'a',
|
||||
fieldName: 'Foo',
|
||||
meta: {
|
||||
type: 'number',
|
||||
const map = TextBasedDatasource.uniqueLabels(
|
||||
{
|
||||
layers: {
|
||||
a: {
|
||||
columns: [
|
||||
{
|
||||
columnId: 'a',
|
||||
fieldName: 'Foo',
|
||||
meta: {
|
||||
type: 'number',
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
columnId: 'b',
|
||||
fieldName: 'Foo',
|
||||
meta: {
|
||||
type: 'number',
|
||||
{
|
||||
columnId: 'b',
|
||||
fieldName: 'Foo',
|
||||
meta: {
|
||||
type: 'number',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
index: 'foo',
|
||||
],
|
||||
index: 'foo',
|
||||
},
|
||||
},
|
||||
},
|
||||
} as unknown as TextBasedPrivateState);
|
||||
} as unknown as TextBasedPrivateState,
|
||||
{}
|
||||
);
|
||||
|
||||
expect(map).toMatchInlineSnapshot(`
|
||||
Object {
|
||||
|
|
|
@ -370,7 +370,7 @@ export function getTextBasedDatasource({
|
|||
domElement: Element,
|
||||
props: DatasourceDimensionTriggerProps<TextBasedPrivateState>
|
||||
) => {
|
||||
const columnLabelMap = TextBasedDatasource.uniqueLabels(props.state);
|
||||
const columnLabelMap = TextBasedDatasource.uniqueLabels(props.state, props.indexPatterns);
|
||||
const layer = props.state.layers[props.layerId];
|
||||
const selectedField = layer?.allColumns?.find((column) => column.columnId === props.columnId);
|
||||
let customLabel: string | undefined = columnLabelMap[props.columnId];
|
||||
|
@ -581,7 +581,7 @@ export function getTextBasedDatasource({
|
|||
return false;
|
||||
},
|
||||
|
||||
getPublicAPI({ state, layerId }: PublicAPIProps<TextBasedPrivateState>) {
|
||||
getPublicAPI({ state, layerId, indexPatterns }: PublicAPIProps<TextBasedPrivateState>) {
|
||||
return {
|
||||
datasourceId: 'textBased',
|
||||
|
||||
|
@ -600,7 +600,7 @@ export function getTextBasedDatasource({
|
|||
getOperationForColumnId: (columnId: string) => {
|
||||
const layer = state.layers[layerId];
|
||||
const column = layer?.allColumns?.find((c) => c.columnId === columnId);
|
||||
const columnLabelMap = TextBasedDatasource.uniqueLabels(state);
|
||||
const columnLabelMap = TextBasedDatasource.uniqueLabels(state, indexPatterns);
|
||||
|
||||
if (column) {
|
||||
return {
|
||||
|
|
|
@ -171,7 +171,10 @@ export function LayerPanel(
|
|||
const columnLabelMap =
|
||||
!layerDatasource && activeVisualization.getUniqueLabels
|
||||
? activeVisualization.getUniqueLabels(props.visualizationState)
|
||||
: layerDatasource?.uniqueLabels?.(layerDatasourceConfigProps?.state);
|
||||
: layerDatasource?.uniqueLabels?.(
|
||||
layerDatasourceConfigProps?.state,
|
||||
framePublicAPI.dataViews.indexPatterns
|
||||
);
|
||||
|
||||
const isEmptyLayer = !dimensionGroups.some((d) => d.accessors.length > 0);
|
||||
const { activeId, activeGroup } = activeDimension;
|
||||
|
|
|
@ -80,7 +80,9 @@ export function buildExpression({
|
|||
dateRange: DateRange;
|
||||
nowInstant: Date;
|
||||
}): Ast | null {
|
||||
if (visualization === null) {
|
||||
// if an unregistered visualization is passed in the SO
|
||||
// then this will be set as "undefined". Relax the check to catch both
|
||||
if (visualization == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
|
|
@ -135,6 +135,10 @@ export function getSuggestions({
|
|||
// and rank them by score
|
||||
return Object.entries(visualizationMap)
|
||||
.flatMap(([visualizationId, visualization]) => {
|
||||
// in case a missing visualization type is passed via SO, just avoid to compute anything for it
|
||||
if (!visualization) {
|
||||
return [];
|
||||
}
|
||||
const supportedLayerTypes = visualization.getSupportedLayers().map(({ type }) => type);
|
||||
return datasourceTableSuggestions
|
||||
.filter((datasourceSuggestion) => {
|
||||
|
|
|
@ -52,7 +52,7 @@ export function createMockDatasource(
|
|||
cloneLayer: jest.fn((_state, _layerId, _newLayerId, getNewId) => {}),
|
||||
removeColumn: jest.fn((props) => {}),
|
||||
getLayers: jest.fn((_state) => []),
|
||||
uniqueLabels: jest.fn((_state) => ({})),
|
||||
uniqueLabels: jest.fn((_state, dataViews) => ({})),
|
||||
renderDimensionTrigger: jest.fn(),
|
||||
renderDimensionEditor: jest.fn(),
|
||||
getDropProps: jest.fn(),
|
||||
|
|
|
@ -469,7 +469,7 @@ export interface Datasource<T = unknown, P = unknown> {
|
|||
/**
|
||||
* uniqueLabels of dimensions exposed for aria-labels of dragged dimensions
|
||||
*/
|
||||
uniqueLabels: (state: T) => Record<string, string>;
|
||||
uniqueLabels: (state: T, indexPatterns: IndexPatternMap) => Record<string, string>;
|
||||
/**
|
||||
* Check the internal state integrity and returns a list of missing references
|
||||
*/
|
||||
|
|
|
@ -334,7 +334,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
|
||||
expect(await PageObjects.lens.getTitle()).to.eql('lnsXYvis');
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsPie_sliceByDimensionPanel')).to.eql(
|
||||
'Top values of ip'
|
||||
'Top 3 values of ip'
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsPie_sizeByDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
|
@ -344,7 +344,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
await PageObjects.lens.switchToVisualization('bar');
|
||||
expect(await PageObjects.lens.getTitle()).to.eql('lnsXYvis');
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_xDimensionPanel')).to.eql(
|
||||
'Top values of ip'
|
||||
'Top 3 values of ip'
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
|
@ -365,7 +365,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
'Average of bytes'
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_splitDimensionPanel')).to.eql(
|
||||
'Top values of ip'
|
||||
'Top 3 values of ip'
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -378,7 +378,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
await PageObjects.lens.switchToVisualization('treemap');
|
||||
expect(
|
||||
await PageObjects.lens.getDimensionTriggersTexts('lnsPie_groupByDimensionPanel')
|
||||
).to.eql(['Top values of geo.dest', 'Top values of geo.src']);
|
||||
).to.eql(['Top 7 values of geo.dest', 'Top 3 values of geo.src']);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsPie_sizeByDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
);
|
||||
|
|
|
@ -56,7 +56,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
});
|
||||
|
||||
it('should allow to configure column visibility', async () => {
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(0)).to.equal('Top values of ip');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(0)).to.equal('Top 3 values of ip');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(1)).to.equal('@timestamp per 3 hours');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(2)).to.equal('Average of bytes');
|
||||
|
||||
|
@ -67,7 +67,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
|
||||
await PageObjects.lens.toggleColumnVisibility('lnsDatatable_rows > lns-dimensionTrigger', 4);
|
||||
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(0)).to.equal('Top values of ip');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(0)).to.equal('Top 3 values of ip');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(1)).to.equal('@timestamp per 3 hours');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(2)).to.equal('Average of bytes');
|
||||
});
|
||||
|
|
|
@ -379,7 +379,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
expect(await PageObjects.lens.getDimensionTriggersTexts('lns-layerPanel-0')).to.eql([
|
||||
'@timestamp',
|
||||
'Average of bytes',
|
||||
'Top values of ip',
|
||||
'Top 3 values of ip',
|
||||
]);
|
||||
expect(await PageObjects.lens.getDimensionTriggersTexts('lns-layerPanel-1')).to.eql([
|
||||
'@timestamp [1]',
|
||||
|
|
|
@ -21,7 +21,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
const config = getService('config');
|
||||
const browser = getService('browser');
|
||||
|
||||
function getTranslationFr(term: string) {
|
||||
function getTranslationFr(term: string, field?: string, values: number = 3) {
|
||||
switch (term) {
|
||||
case 'legacyMetric':
|
||||
return 'Ancien indicateur';
|
||||
|
@ -53,6 +53,12 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
return 'enregistrements';
|
||||
case 'moving_average':
|
||||
return 'Moyenne mobile de';
|
||||
case 'average':
|
||||
return field ? `Moyenne de ${field}` : `Moyenne`;
|
||||
case 'max':
|
||||
return field ? `Maximum de ${field}` : 'Maximum';
|
||||
case 'terms':
|
||||
return field ? `${values} principales valeurs de ${field}` : 'Valeurs les plus élevées';
|
||||
case 'sum':
|
||||
return 'somme';
|
||||
default:
|
||||
|
@ -60,7 +66,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
}
|
||||
}
|
||||
|
||||
function getTranslationJa(term: string) {
|
||||
function getTranslationJa(term: string, field?: string, values: number = 3) {
|
||||
switch (term) {
|
||||
case 'legacyMetric':
|
||||
return 'レガシーメトリック';
|
||||
|
@ -91,6 +97,12 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
return '記録';
|
||||
case 'moving_average':
|
||||
return 'の移動平均';
|
||||
case 'average':
|
||||
return field ? `${field} の平均` : `平均`;
|
||||
case 'max':
|
||||
return field ? `${field} お最高値` : '最高';
|
||||
case 'terms':
|
||||
return field ? `${field}の上位の${values} 値` : 'トップの値';
|
||||
case 'sum':
|
||||
return '合計';
|
||||
default:
|
||||
|
@ -98,7 +110,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
}
|
||||
}
|
||||
|
||||
function getTranslationZh(term: string) {
|
||||
function getTranslationZh(term: string, field?: string, values: number = 3) {
|
||||
switch (term) {
|
||||
case 'legacyMetric':
|
||||
return '旧版指标';
|
||||
|
@ -129,6 +141,12 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
return '记录';
|
||||
case 'moving_average':
|
||||
return '的移动平均值';
|
||||
case 'average':
|
||||
return field ? `${field} 的平均值` : '平均值';
|
||||
case 'max':
|
||||
return field ? `${field} 的最大值` : '最大值';
|
||||
case 'terms':
|
||||
return field ? `${field} 的排名前 ${values} 的值` : `排名最前值`;
|
||||
case 'sum':
|
||||
return '求和';
|
||||
default:
|
||||
|
@ -136,7 +154,7 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
}
|
||||
}
|
||||
|
||||
function getExpectedI18nTranslator(locale: string): (chartType: string) => string {
|
||||
function getExpectedI18nTranslator(locale: string): (term: string, field?: string) => string {
|
||||
switch (locale) {
|
||||
case 'ja-JP':
|
||||
return getTranslationJa;
|
||||
|
@ -145,12 +163,12 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
case 'fr-FR':
|
||||
return getTranslationFr;
|
||||
default:
|
||||
return (v: string) => v;
|
||||
return (v: string, field?: string) => v;
|
||||
}
|
||||
}
|
||||
|
||||
describe('lens smokescreen tests', () => {
|
||||
let termTranslator: (chartType: string) => string;
|
||||
let termTranslator: (term: string, field?: string, values?: number) => string;
|
||||
|
||||
before(async () => {
|
||||
const serverArgs: string[] = config.get('kbnTestServer.serverArgs');
|
||||
|
@ -234,15 +252,17 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
await listingTable.searchForItemWithName('Artistpreviouslyknownaslens');
|
||||
await PageObjects.lens.clickVisualizeListItemTitle('Artistpreviouslyknownaslens');
|
||||
await PageObjects.lens.goToTimeRange();
|
||||
await PageObjects.lens.assertLegacyMetric('Maximum of bytes', '19,986');
|
||||
await PageObjects.lens.assertLegacyMetric(termTranslator('max', 'bytes'), '19,986');
|
||||
await PageObjects.lens.switchToVisualization('lnsDatatable', termTranslator('datatable'));
|
||||
expect(await PageObjects.lens.getDatatableHeaderText()).to.eql('Maximum of bytes');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText()).to.eql(
|
||||
termTranslator('max', 'bytes')
|
||||
);
|
||||
expect(await PageObjects.lens.getDatatableCellText(0, 0)).to.eql('19,986');
|
||||
await PageObjects.lens.switchToVisualization(
|
||||
'lnsLegacyMetric',
|
||||
termTranslator('legacyMetric')
|
||||
);
|
||||
await PageObjects.lens.assertLegacyMetric('Maximum of bytes', '19,986');
|
||||
await PageObjects.lens.assertLegacyMetric(termTranslator('max', 'bytes'), '19,986');
|
||||
});
|
||||
|
||||
it('should transition from a multi-layer stacked bar to a multi-layer line chart and correctly remove all layers', async () => {
|
||||
|
@ -437,10 +457,10 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
|
||||
expect(await PageObjects.lens.getTitle()).to.eql('lnsXYvis');
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsPie_sliceByDimensionPanel')).to.eql(
|
||||
'Top values of ip'
|
||||
termTranslator('terms', 'ip')
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsPie_sizeByDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
termTranslator('average', 'bytes')
|
||||
);
|
||||
|
||||
expect(await PageObjects.lens.hasChartSwitchWarning('bar', termTranslator('bar'))).to.eql(
|
||||
|
@ -449,10 +469,10 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
await PageObjects.lens.switchToVisualization('bar', termTranslator('bar'));
|
||||
expect(await PageObjects.lens.getTitle()).to.eql('lnsXYvis');
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_xDimensionPanel')).to.eql(
|
||||
'Top values of ip'
|
||||
termTranslator('terms', 'ip')
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
termTranslator('average', 'bytes')
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -467,10 +487,10 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
'@timestamp'
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_yDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
termTranslator('average', 'bytes')
|
||||
);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsXY_splitDimensionPanel')).to.eql(
|
||||
'Top values of ip'
|
||||
termTranslator('terms', 'ip')
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -485,9 +505,9 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
await PageObjects.lens.switchToVisualization('treemap', termTranslator('treemap'));
|
||||
expect(
|
||||
await PageObjects.lens.getDimensionTriggersTexts('lnsPie_groupByDimensionPanel')
|
||||
).to.eql(['Top values of geo.dest', 'Top values of geo.src']);
|
||||
).to.eql([termTranslator('terms', 'geo.dest', 7), termTranslator('terms', 'geo.src')]);
|
||||
expect(await PageObjects.lens.getDimensionTriggerText('lnsPie_sizeByDimensionPanel')).to.eql(
|
||||
'Average of bytes'
|
||||
termTranslator('average', 'bytes')
|
||||
);
|
||||
});
|
||||
|
||||
|
@ -516,7 +536,9 @@ export default function ({ getService, getPageObjects }: FtrProviderContext) {
|
|||
|
||||
// Need to provide a fn for these
|
||||
// expect(await PageObjects.lens.getDatatableHeaderText()).to.eql('@timestamp per 3 hours');
|
||||
// expect(await PageObjects.lens.getDatatableHeaderText(1)).to.eql('Average of bytes');
|
||||
expect(await PageObjects.lens.getDatatableHeaderText(1)).to.eql(
|
||||
termTranslator('average', 'bytes')
|
||||
);
|
||||
expect(await PageObjects.lens.getDatatableCellText(0, 0)).to.eql('2015-09-20 00:00');
|
||||
expect(await PageObjects.lens.getDatatableCellText(0, 1)).to.eql('6,011.351');
|
||||
});
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue