Mirror of https://github.com/elastic/kibana.git
[Maps] refactor feature id assignment

* add test case for falsy id value
* review feedback

parent 4a4c860604
commit a7ae57e485

10 changed files with 162 additions and 79 deletions
@@ -11,7 +11,6 @@ import {
   DECIMAL_DEGREES_PRECISION,
   ES_GEO_FIELD_TYPE,
   ES_SPATIAL_RELATIONS,
-  FEATURE_ID_PROPERTY_NAME,
   GEO_JSON_TYPE,
   POLYGON_COORDINATES_EXTERIOR_INDEX,
   LON_INDEX,
@@ -81,12 +80,10 @@ export function hitsToGeoJson(hits, flattenHit, geoFieldName, geoFieldType) {
       features.push({
         type: 'Feature',
         geometry: tmpGeometriesAccumulator[j],
-        properties: {
-          ...properties,
-          // _id is not unique across Kibana index pattern. Multiple ES indices could have _id collisions
-          // Need to prefix with _index to guarantee uniqueness
-          [FEATURE_ID_PROPERTY_NAME]: `${properties._index}:${properties._id}:${j}`
-        }
+        // _id is not unique across Kibana index pattern. Multiple ES indices could have _id collisions
+        // Need to prefix with _index to guarantee uniqueness
+        id: `${properties._index}:${properties._id}:${j}`,
+        properties,
       });
     }
   }
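Not part of the diff: a minimal sketch of why the composite id above stays collision-free when two indices reuse the same document `_id`. The hit data here is hypothetical; the field names mirror the Elasticsearch `_index`/`_id` metadata and the geometry index `j` used in hitsToGeoJson.

// Hypothetical hits from two different indices that reuse the same _id,
// each carrying one or more geometries (as hitsToGeoJson accumulates them).
const hits = [
  { _index: 'logs-a', _id: 'doc1', geometries: [{ type: 'Point', coordinates: [0, 0] }] },
  { _index: 'logs-b', _id: 'doc1', geometries: [{ type: 'Point', coordinates: [1, 1] }] },
];

const featureIds = [];
for (const hit of hits) {
  hit.geometries.forEach((geometry, j) => {
    // _id alone could collide across indices, so the id is prefixed
    // with _index and suffixed with the geometry index j.
    featureIds.push(`${hit._index}:${hit._id}:${j}`);
  });
}

console.log(featureIds); // [ 'logs-a:doc1:0', 'logs-b:doc1:0' ]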
@@ -74,8 +74,8 @@ describe('hitsToGeoJson', () => {
         coordinates: [100, 20],
         type: 'Point',
       },
+      id: 'index1:doc1:0',
       properties: {
-        __kbn__feature_id__: 'index1:doc1:0',
         _id: 'doc1',
         _index: 'index1',
       },
@@ -139,8 +139,8 @@ describe('hitsToGeoJson', () => {
         coordinates: [100, 20],
         type: 'Point',
       },
+      id: 'index1:doc1:0',
       properties: {
-        __kbn__feature_id__: 'index1:doc1:0',
         _id: 'doc1',
         _index: 'index1',
         myField: 8
@@ -152,8 +152,8 @@ describe('hitsToGeoJson', () => {
         coordinates: [110, 30],
         type: 'Point',
       },
+      id: 'index1:doc1:1',
       properties: {
-        __kbn__feature_id__: 'index1:doc1:1',
         _id: 'doc1',
         _index: 'index1',
         myField: 8
@@ -10,7 +10,6 @@ import {
   ES_GEO_FIELD_TYPE,
   GEOJSON_FILE,
   ES_SIZE_LIMIT,
-  FEATURE_ID_PROPERTY_NAME
 } from '../../../../common/constants';
 import { ClientFileCreateSourceEditor } from './create_client_file_source_editor';
 import { ESSearchSource } from '../es_search_source';
@@ -137,21 +136,8 @@ export class GeojsonFileSource extends AbstractVectorSource {
   }

   async getGeoJsonWithMeta() {
-    const copiedPropsFeatures = this._descriptor.__featureCollection.features.map((feature, index) => {
-      const properties = feature.properties ? { ...feature.properties } : {};
-      properties[FEATURE_ID_PROPERTY_NAME] = index;
-      return {
-        type: 'Feature',
-        geometry: feature.geometry,
-        properties,
-      };
-    });
-
     return {
-      data: {
-        type: 'FeatureCollection',
-        features: copiedPropsFeatures
-      },
+      data: this._descriptor.__featureCollection,
       meta: {}
     };
   }
@@ -7,7 +7,7 @@
 import { AbstractVectorSource } from '../vector_source';
 import { VECTOR_SHAPE_TYPES } from '../vector_feature_types';
 import React from 'react';
-import { EMS_FILE, FEATURE_ID_PROPERTY_NAME, FIELD_ORIGIN } from '../../../../common/constants';
+import { EMS_FILE, FIELD_ORIGIN } from '../../../../common/constants';
 import { getEMSClient } from '../../../meta';
 import { EMSFileCreateSourceEditor } from './create_source_editor';
 import { i18n } from '@kbn/i18n';
@@ -94,7 +94,7 @@ export class EMSFileSource extends AbstractVectorSource {
       return field.type === 'id';
     });
     featureCollection.features.forEach((feature, index) => {
-      feature.properties[FEATURE_ID_PROPERTY_NAME] = emsIdField
+      feature.id = emsIdField
         ? feature.properties[emsIdField.id]
         : index;
     });
@@ -6,7 +6,7 @@

 import { RENDER_AS } from './render_as';
 import { getTileBoundingBox } from './geo_tile_utils';
-import { EMPTY_FEATURE_COLLECTION, FEATURE_ID_PROPERTY_NAME } from '../../../../common/constants';
+import { EMPTY_FEATURE_COLLECTION } from '../../../../common/constants';

 export function convertToGeoJson({ table, renderAs }) {

@@ -34,9 +34,7 @@ export function convertToGeoJson({ table, renderAs }) {
       return;
     }

-    const properties = {
-      [FEATURE_ID_PROPERTY_NAME]: gridKey
-    };
+    const properties = {};
     metricColumns.forEach(metricColumn => {
       properties[metricColumn.aggConfig.id] = row[metricColumn.id];
     });
@@ -49,6 +47,7 @@ export function convertToGeoJson({ table, renderAs }) {
         geocentroidColumn,
         renderAs,
       }),
+      id: gridKey,
       properties
     });
   });
@@ -6,8 +6,6 @@

 import _ from 'lodash';

-import { FEATURE_ID_PROPERTY_NAME } from '../../../common/constants';
-
 const LAT_INDEX = 0;
 const LON_INDEX = 1;

@@ -47,10 +45,10 @@ export function convertToLines(esResponse) {
           type: 'LineString',
           coordinates: [[sourceCentroid.location.lon, sourceCentroid.location.lat], dest]
         },
+        id: `${dest.join()},${key}`,
         properties: {
-          [FEATURE_ID_PROPERTY_NAME]: `${dest.join()},${key}`,
           ...rest
-        }
+        },
       });
     }
   }
@@ -10,7 +10,7 @@ import { CreateSourceEditor } from './create_source_editor';
 import { getKibanaRegionList } from '../../../meta';
 import { i18n } from '@kbn/i18n';
 import { getDataSourceLabel } from '../../../../common/i18n_getters';
-import { FEATURE_ID_PROPERTY_NAME, FIELD_ORIGIN } from '../../../../common/constants';
+import { FIELD_ORIGIN } from '../../../../common/constants';
 import { KibanaRegionField } from '../../fields/kibana_region_field';

 export class KibanaRegionmapSource extends AbstractVectorSource {
@@ -91,9 +91,6 @@ export class KibanaRegionmapSource extends AbstractVectorSource {
       featureCollectionPath: vectorFileMeta.meta.feature_collection_path,
       fetchUrl: vectorFileMeta.url
     });
-    featureCollection.features.forEach((feature, index) => {
-      feature.properties[FEATURE_ID_PROPERTY_NAME] = index;
-    });
     return {
       data: featureCollection
     };
@@ -0,0 +1,57 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import _ from 'lodash';
+import { FEATURE_ID_PROPERTY_NAME } from '../../../common/constants';
+
+let idCounter = 0;
+
+function generateNumericalId() {
+  const newId = idCounter < Number.MAX_SAFE_INTEGER ? idCounter : 0;
+  idCounter = newId + 1;
+  return newId;
+}
+
+export function assignFeatureIds(featureCollection) {
+
+  // wrt https://github.com/elastic/kibana/issues/39317
+  // In constrained resource environments, mapbox-gl may throw a stackoverflow error due to hitting the browser's recursion limit. This crashes Kibana.
+  // This error is thrown in mapbox-gl's quicksort implementation, when it is sorting all the features by id.
+  // This is a work-around to avoid hitting such a worst-case
+  // This was tested as a suitable work-around for mapbox-gl 0.54
+  // The core issue itself is likely related to https://github.com/mapbox/mapbox-gl-js/issues/6086
+
+  // This only shuffles the id-assignment, _not_ the features in the collection
+  // The reason for this is that we do not want to modify the feature-ordering, which is the responsibility of the VectorSource#.
+  const ids = [];
+  for (let i = 0; i < featureCollection.features.length; i++) {
+    const id = generateNumericalId();
+    ids.push(id);
+  }
+
+  const randomizedIds = _.shuffle(ids);
+  const features = [];
+  for (let i = 0; i < featureCollection.features.length; i++) {
+    const numericId = randomizedIds[i];
+    const feature = featureCollection.features[i];
+    features.push({
+      type: 'Feature',
+      geometry: feature.geometry, // do not copy geometry, this object can be massive
+      properties: {
+        // preserve feature id provided by source so features can be referenced across fetches
+        [FEATURE_ID_PROPERTY_NAME]: feature.id == null ? numericId : feature.id,
+        // create new object for properties so original is not polluted with kibana internal props
+        ...feature.properties,
+      },
+      id: numericId, // Mapbox feature state id, must be integer
+    });
+  }
+
+  return {
+    type: 'FeatureCollection',
+    features
+  };
+}
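Not part of the commit: a small usage sketch of assignFeatureIds as added above. It assumes the sketch sits next to the new module and relies on the tests below, which show FEATURE_ID_PROPERTY_NAME resolving to the '__kbn__feature_id__' key.

import { assignFeatureIds } from './assign_feature_ids';

// Two features: one with a source-provided id, one without.
const sourceCollection = {
  type: 'FeatureCollection',
  features: [
    { type: 'Feature', id: 'regionA', geometry: { type: 'Point', coordinates: [0, 0] }, properties: {} },
    { type: 'Feature', geometry: { type: 'Point', coordinates: [1, 1] }, properties: {} },
  ],
};

const layerCollection = assignFeatureIds(sourceCollection);

// Every feature gets a shuffled numeric id for mapbox-gl feature state...
layerCollection.features.forEach(feature => {
  console.log(typeof feature.id); // 'number'
});

// ...while the source id (or the numeric id as a fallback) is preserved
// on the __kbn__feature_id__ property so features can be referenced across fetches.
console.log(layerCollection.features[0].properties.__kbn__feature_id__); // 'regionA'
console.log(typeof layerCollection.features[1].properties.__kbn__feature_id__); // 'number'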
@@ -0,0 +1,83 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+
+import { assignFeatureIds } from './assign_feature_ids';
+import { FEATURE_ID_PROPERTY_NAME } from '../../../common/constants';
+
+const featureId = 'myFeature1';
+
+test('should provide unique id when feature.id is not provided', () => {
+  const featureCollection = {
+    features: [
+      {
+        properties: {}
+      },
+      {
+        properties: {}
+      },
+    ]
+  };
+
+  const updatedFeatureCollection = assignFeatureIds(featureCollection);
+  const feature1 = updatedFeatureCollection.features[0];
+  const feature2 = updatedFeatureCollection.features[1];
+  expect(typeof feature1.id).toBe('number');
+  expect(typeof feature2.id).toBe('number');
+  expect(feature1.id).toBe(feature1.properties[FEATURE_ID_PROPERTY_NAME]);
+  expect(feature1.id).not.toBe(feature2.id);
+});
+
+test('should preserve feature id when provided', () => {
+  const featureCollection = {
+    features: [
+      {
+        id: featureId,
+        properties: {}
+      }
+    ]
+  };
+
+  const updatedFeatureCollection = assignFeatureIds(featureCollection);
+  const feature1 = updatedFeatureCollection.features[0];
+  expect(typeof feature1.id).toBe('number');
+  expect(feature1.id).not.toBe(feature1.properties[FEATURE_ID_PROPERTY_NAME]);
+  expect(feature1.properties[FEATURE_ID_PROPERTY_NAME]).toBe(featureId);
+});
+
+test('should preserve feature id for falsy value', () => {
+  const featureCollection = {
+    features: [
+      {
+        id: 0,
+        properties: {}
+      }
+    ]
+  };
+
+  const updatedFeatureCollection = assignFeatureIds(featureCollection);
+  const feature1 = updatedFeatureCollection.features[0];
+  expect(typeof feature1.id).toBe('number');
+  expect(feature1.id).not.toBe(feature1.properties[FEATURE_ID_PROPERTY_NAME]);
+  expect(feature1.properties[FEATURE_ID_PROPERTY_NAME]).toBe(0);
+});
+
+test('should not modify original feature properties', () => {
+  const featureProperties = {};
+  const featureCollection = {
+    features: [
+      {
+        id: featureId,
+        properties: featureProperties
+      }
+    ]
+  };
+
+  const updatedFeatureCollection = assignFeatureIds(featureCollection);
+  const feature1 = updatedFeatureCollection.features[0];
+  expect(feature1.properties[FEATURE_ID_PROPERTY_NAME]).toBe(featureId);
+  expect(featureProperties).not.toHaveProperty(FEATURE_ID_PROPERTY_NAME);
+});
+
@@ -23,6 +23,7 @@ import { isRefreshOnlyQuery } from './util/is_refresh_only_query';
 import { EuiIcon } from '@elastic/eui';
 import { i18n } from '@kbn/i18n';
 import { DataRequestAbortError } from './util/data_request';
+import { assignFeatureIds } from './util/assign_feature_ids';

 const VISIBILITY_FILTER_CLAUSE = ['all',
   [
@@ -59,16 +60,6 @@ const POINT_LAYER_MB_FILTER = [...VISIBILITY_FILTER_CLAUSE,
   ]
 ];

-
-let idCounter = 0;
-
-function generateNumericalId() {
-  const newId = idCounter < Number.MAX_SAFE_INTEGER ? idCounter : 0;
-  idCounter = newId + 1;
-  return newId;
-}
-
-
 export class VectorLayer extends AbstractLayer {

   static type = LAYER_TYPE.VECTOR;
@@ -478,15 +469,15 @@ export class VectorLayer extends AbstractLayer {
     try {
       startLoading(SOURCE_DATA_ID_ORIGIN, requestToken, searchFilters);
       const layerName = await this.getDisplayName();
-      const { data: featureCollection, meta } =
+      const { data: sourceFeatureCollection, meta } =
         await this._source.getGeoJsonWithMeta(layerName, searchFilters,
           registerCancelCallback.bind(null, requestToken)
         );
-      this._assignIdsToFeatures(featureCollection);
-      stopLoading(SOURCE_DATA_ID_ORIGIN, requestToken, featureCollection, meta);
+      const layerFeatureCollection = assignFeatureIds(sourceFeatureCollection);
+      stopLoading(SOURCE_DATA_ID_ORIGIN, requestToken, layerFeatureCollection, meta);
       return {
         refreshed: true,
-        featureCollection: featureCollection
+        featureCollection: layerFeatureCollection
       };
     } catch (error) {
       if (!(error instanceof DataRequestAbortError)) {
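Not part of the diff: a condensed sketch of the reworked flow above, with a stubbed source standing in for getGeoJsonWithMeta and the loading callbacks omitted, to show where assignFeatureIds now sits between fetching and stopLoading. The stub names are illustrative only.

import { assignFeatureIds } from './util/assign_feature_ids';

// Stubbed source, for illustration only.
const source = {
  async getGeoJsonWithMeta() {
    return {
      data: { type: 'FeatureCollection', features: [{ type: 'Feature', geometry: null, properties: {} }] },
      meta: {},
    };
  },
};

async function syncSourceSketch() {
  // 1. Fetch the raw collection from the source (previously mutated in place by _assignIdsToFeatures).
  const { data: sourceFeatureCollection, meta } = await source.getGeoJsonWithMeta();
  // 2. Derive a new collection with layer-level ids instead of mutating the source data.
  const layerFeatureCollection = assignFeatureIds(sourceFeatureCollection);
  // 3. Hand the derived collection to the data-request machinery (stopLoading in the real code).
  return { refreshed: true, featureCollection: layerFeatureCollection, meta };
}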
@@ -498,31 +489,6 @@ export class VectorLayer extends AbstractLayer {
     }
   }

-  _assignIdsToFeatures(featureCollection) {
-
-    //wrt https://github.com/elastic/kibana/issues/39317
-    //In constrained resource environments, mapbox-gl may throw a stackoverflow error due to hitting the browser's recursion limit. This crashes Kibana.
-    //This error is thrown in mapbox-gl's quicksort implementation, when it is sorting all the features by id.
-    //This is a work-around to avoid hitting such a worst-case
-    //This was tested as a suitable work-around for mapbox-gl 0.54
-    //The core issue itself is likely related to https://github.com/mapbox/mapbox-gl-js/issues/6086
-
-    //This only shuffles the id-assignment, _not_ the features in the collection
-    //The reason for this is that we do not want to modify the feature-ordering, which is the responsiblity of the VectorSource#.
-    const ids = [];
-    for (let i = 0; i < featureCollection.features.length; i++) {
-      const id = generateNumericalId();
-      ids.push(id);
-    }
-
-    const randomizedIds = _.shuffle(ids);
-    for (let i = 0; i < featureCollection.features.length; i++) {
-      const id = randomizedIds[i];
-      const feature = featureCollection.features[i];
-      feature.id = id; // Mapbox feature state id, must be integer
-    }
-  }
-
   async syncData(syncContext) {
     if (!this.isVisible() || !this.showAtZoomLevel(syncContext.dataFilters.zoom)) {
       return;
@@ -534,8 +500,8 @@
     }

     const joinStates = await this._syncJoins(syncContext);
-    await this._performInnerJoins(sourceResult, joinStates, syncContext.updateSourceData);

+    await this._performInnerJoins(sourceResult, joinStates, syncContext.updateSourceData);
   }

   _getSourceFeatureCollection() {