Add index-time scripts to geo_point field mapper (#71861)
This commit adds the ability to define an index-time geo_point field with a script parameter, allowing you to calculate points from other values within the indexed document.
This commit is contained in:
parent
0642c73ed2
commit
ee3510b766
11 changed files with 503 additions and 35 deletions
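As a rough sketch of what the new capability enables (the index name, field names, and script below are illustrative, not taken from the commit), a mapping could compute a `geo_point` at index time from other values in the source document:

# Hypothetical mapping: "coords" is computed at index time from two source fields.
PUT example-index
{
  "mappings": {
    "properties": {
      "lat": { "type": "double" },
      "lon": { "type": "double" },
      "coords": {
        "type": "geo_point",
        "script": {
          "source": "emit(params._source.lat, params._source.lon);"
        }
      }
    }
  }
}

Scripts use the same `emit(lat, lon)` convention as runtime `geo_point` fields, and documents that try to supply `coords` directly are rejected, as the documentation and `parse()` changes below describe.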
@@ -125,18 +125,41 @@ The following parameters are accepted by `geo_point` fields:
    malformed geo-points throw an exception and reject the whole document.
    A geo-point is considered malformed if its latitude is outside the range
    -90 <= latitude <= 90, or if its longitude is outside the range -180 <= longitude <= 180.
    Note that this cannot be set if the `script` parameter is used.

`ignore_z_value`::

    If `true` (default) three dimension points will be accepted (stored in source)
    but only latitude and longitude values will be indexed; the third dimension is
    ignored. If `false`, geo-points containing any more than latitude and longitude
-   (two dimensions) values throw an exception and reject the whole document.
+   (two dimensions) values throw an exception and reject the whole document. Note
+   that this cannot be set if the `script` parameter is used.

<<null-value,`null_value`>>::

    Accepts an geopoint value which is substituted for any explicit `null` values.
-   Defaults to `null`, which means the field is treated as missing.
+   Defaults to `null`, which means the field is treated as missing. Note that this
+   cannot be set if the `script` parameter is used.

`on_script_error`::

    Defines what to do if the script defined by the `script` parameter
    throws an error at indexing time. Accepts `fail` (default), which
    will cause the entire document to be rejected, and `continue`, which
    will register the field in the document's
    <<mapping-ignored-field,`_ignored`>> metadata field and continue
    indexing. This parameter can only be set if the `script` field is
    also set.

`script`::

    If this parameter is set, then the field will index values generated
    by this script, rather than reading the values directly from the
    source. If a value is set for this field on the input document, then
    the document will be rejected with an error.
    Scripts are in the same format as their
    <<runtime-mapping-fields,runtime equivalent>>, and should emit points
    as a pair of (lat, lon) double values.
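To make the `script` and `on_script_error` parameters above concrete, here is a hedged sketch (index and field names are hypothetical). With `on_script_error: continue`, a script failure flags the document in `_ignored` instead of rejecting it:

# Hypothetical: a scripted geo_point that tolerates script errors.
PUT my-index
{
  "mappings": {
    "properties": {
      "latitude":  { "type": "double" },
      "longitude": { "type": "double" },
      "point": {
        "type": "geo_point",
        "on_script_error": "continue",
        "script": {
          "source": "emit(doc['latitude'].value, doc['longitude'].value);"
        }
      }
    }
  }
}

Note that, per the text above, `null_value`, `ignore_malformed`, and `ignore_z_value` cannot be combined with `script`.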
==== Using geo-points in scripts
@@ -0,0 +1,206 @@
---
setup:
  - do:
      indices.create:
        index: locations
        body:
          settings:
            number_of_shards: 1
            number_of_replicas: 0
          mappings:
            properties:
              location_from_doc_value:
                type: geo_point
                script:
                  source: |
                    emit(doc["location"].lat, doc["location"].lon);
              location_from_source:
                type: geo_point
                script:
                  source: |
                    emit(params._source.location.lat, params._source.location.lon);
              timestamp:
                type: date
              location:
                type: geo_point
  - do:
      bulk:
        index: locations
        refresh: true
        body: |
          {"index":{}}
          {"timestamp": "1998-04-30T14:30:17-05:00", "location" : {"lat": 13.5, "lon" : 34.89}}
          {"index":{}}
          {"timestamp": "1998-04-30T14:30:53-05:00", "location" : {"lat": -7.9, "lon" : 120.78}}
          {"index":{}}
          {"timestamp": "1998-04-30T14:31:12-05:00", "location" : {"lat": 45.78, "lon" : -173.45}}
          {"index":{}}
          {"timestamp": "1998-04-30T14:31:19-05:00", "location" : {"lat": 32.45, "lon" : 45.6}}
          {"index":{}}
          {"timestamp": "1998-04-30T14:31:22-05:00", "location" : {"lat": -63.24, "lon" : 31.0}}
          {"index":{}}
          {"timestamp": "1998-04-30T14:31:27-05:00", "location" : {"lat": 0.0, "lon" : 0.0}}

---
"get mapping":
  - do:
      indices.get_mapping:
        index: locations
  - match: {locations.mappings.properties.location_from_source.type: geo_point }
  - match:
      locations.mappings.properties.location_from_source.script.source: |
        emit(params._source.location.lat, params._source.location.lon);
  - match: {locations.mappings.properties.location_from_source.script.lang: painless }

---
"fetch fields from source":
  - do:
      search:
        index: locations
        body:
          sort: timestamp
          fields: [location, location_from_doc_value, location_from_source]
  - match: {hits.total.value: 6}
  - match: {hits.hits.0.fields.location.0.type: "Point" }
  - match: {hits.hits.0.fields.location.0.coordinates: [34.89, 13.5] }
  # calculated from scripts adds annoying extra precision
  - match: { hits.hits.0.fields.location_from_doc_value.0.type: "Point" }
  - match: { hits.hits.0.fields.location_from_doc_value.0.coordinates: [ 34.889999935403466, 13.499999991618097 ] }
  - match: { hits.hits.0.fields.location_from_source.0.type: "Point" }
  - match: { hits.hits.0.fields.location_from_source.0.coordinates: [ 34.889999935403466, 13.499999991618097 ] }

---
"exists query":
  - do:
      search:
        index: locations
        body:
          query:
            exists:
              field: location_from_source
  - match: {hits.total.value: 6}

---
"geo bounding box query":
  - do:
      search:
        index: locations
        body:
          query:
            geo_bounding_box:
              location_from_source:
                top_left:
                  lat: 10
                  lon: -10
                bottom_right:
                  lat: -10
                  lon: 10
  - match: {hits.total.value: 1}

---
"geo shape query":
  - do:
      search:
        index: locations
        body:
          query:
            geo_shape:
              location_from_source:
                shape:
                  type: "envelope"
                  coordinates: [ [ -10, 10 ], [ 10, -10 ] ]
  - match: {hits.total.value: 1}

---
"geo distance query":
  - do:
      search:
        index: locations
        body:
          query:
            geo_distance:
              distance: "2000km"
              location_from_source:
                lat: 0
                lon: 0
  - match: {hits.total.value: 1}

---
"bounds agg":
  - do:
      search:
        index: locations
        body:
          aggs:
            bounds:
              geo_bounds:
                field: "location"
                wrap_longitude: false
            bounds_from_doc_value:
              geo_bounds:
                field: "location_from_doc_value"
                wrap_longitude: false
            bounds_from_source:
              geo_bounds:
                field: "location_from_source"
                wrap_longitude: false
  - match: {hits.total.value: 6}
  - match: {aggregations.bounds.bounds.top_left.lat: 45.7799999602139 }
  - match: {aggregations.bounds.bounds.top_left.lon: -173.4500000718981 }
  - match: {aggregations.bounds.bounds.bottom_right.lat: -63.240000014193356 }
  - match: {aggregations.bounds.bounds.bottom_right.lon: 120.77999993227422 }
  - match: {aggregations.bounds_from_doc_value.bounds.top_left.lat: 45.7799999602139 }
  - match: {aggregations.bounds_from_doc_value.bounds.top_left.lon: -173.4500000718981 }
  - match: {aggregations.bounds_from_doc_value.bounds.bottom_right.lat: -63.240000014193356 }
  - match: {aggregations.bounds_from_doc_value.bounds.bottom_right.lon: 120.77999993227422 }
  - match: {aggregations.bounds_from_source.bounds.top_left.lat: 45.7799999602139 }
  - match: {aggregations.bounds_from_source.bounds.top_left.lon: -173.4500000718981 }
  - match: {aggregations.bounds_from_source.bounds.bottom_right.lat: -63.240000014193356 }
  - match: {aggregations.bounds_from_source.bounds.bottom_right.lon: 120.77999993227422 }

---
"geo_distance sort":
  - do:
      search:
        index: locations
        body:
          sort:
            _geo_distance:
              location_from_source:
                lat: 0.0
                lon: 0.0
  - match: {hits.total.value: 6}
  - match: {hits.hits.0._source.location.lat: 0.0 }
  - match: {hits.hits.0._source.location.lon: 0.0 }
  - match: {hits.hits.1._source.location.lat: 13.5 }
  - match: {hits.hits.1._source.location.lon: 34.89 }
  - match: {hits.hits.2._source.location.lat: 32.45 }
  - match: {hits.hits.2._source.location.lon: 45.6 }
  - match: {hits.hits.3._source.location.lat: -63.24 }
  - match: {hits.hits.3._source.location.lon: 31.0 }

---
"distance_feature query":
  - do:
      search:
        index: locations
        body:
          query:
            bool:
              should:
                distance_feature:
                  field: "location"
                  pivot: "1000km"
                  origin: [0.0, 0.0]

  - match: {hits.total.value: 6}
  - match: {hits.hits.0._source.location.lat: 0.0 }
  - match: {hits.hits.0._source.location.lon: 0.0 }
  - match: {hits.hits.1._source.location.lat: 13.5 }
  - match: {hits.hits.1._source.location.lon: 34.89 }
  - match: {hits.hits.2._source.location.lat: 32.45 }
  - match: {hits.hits.2._source.location.lon: 45.6 }
  - match: {hits.hits.3._source.location.lat: -63.24 }
  - match: {hits.hits.3._source.location.lon: 31.0 }
@@ -89,7 +89,7 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {
        }

        @Override
-       public final ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
+       public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
            String geoFormat = format != null ? format : GeoJsonGeometryFormat.NAME;

            if (parsesArrayValue) {

@@ -116,13 +116,13 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {

    private final Explicit<Boolean> ignoreMalformed;
    private final Explicit<Boolean> ignoreZValue;
-   private final Parser<T> parser;
+   private final Parser<? extends T> parser;

    protected AbstractGeometryFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                          Map<String, NamedAnalyzer> indexAnalyzers,
                                          Explicit<Boolean> ignoreMalformed, Explicit<Boolean> ignoreZValue,
                                          MultiFields multiFields, CopyTo copyTo,
-                                         Parser<T> parser) {
+                                         Parser<? extends T> parser) {
        super(simpleName, mappedFieldType, indexAnalyzers, multiFields, copyTo, false, null);
        this.ignoreMalformed = ignoreMalformed;
        this.ignoreZValue = ignoreZValue;

@@ -132,10 +132,24 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {
    protected AbstractGeometryFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                          Explicit<Boolean> ignoreMalformed, Explicit<Boolean> ignoreZValue,
                                          MultiFields multiFields, CopyTo copyTo,
-                                         Parser<T> parser) {
+                                         Parser<? extends T> parser) {
        this(simpleName, mappedFieldType, Collections.emptyMap(), ignoreMalformed, ignoreZValue, multiFields, copyTo, parser);
    }

    protected AbstractGeometryFieldMapper(
        String simpleName,
        MappedFieldType mappedFieldType,
        MultiFields multiFields,
        CopyTo copyTo,
        Parser<? extends T> parser,
        String onScriptError
    ) {
        super(simpleName, mappedFieldType, Collections.emptyMap(), multiFields, copyTo, true, onScriptError);
        this.ignoreMalformed = new Explicit<>(false, true);
        this.ignoreZValue = new Explicit<>(false, true);
        this.parser = parser;
    }

    @Override
    public AbstractGeometryFieldType fieldType() {
        return (AbstractGeometryFieldType) mappedFieldType;

@@ -155,13 +169,16 @@ public abstract class AbstractGeometryFieldMapper<T> extends FieldMapper {

    @Override
    public final void parse(ParseContext context) throws IOException {
        if (hasScript) {
            throw new MapperParsingException("failed to parse field [" + fieldType().name() + "] of type + " + contentType() + "]",
                new IllegalArgumentException("Cannot index data directly into a field with a [script] parameter"));
        }
        parser.parse(context.parser(), v -> index(context, v), e -> {
            if (ignoreMalformed()) {
                context.addIgnoredField(fieldType().name());
            } else {
                throw new MapperParsingException(
-                   "Failed to parse field [" + fieldType().name() + "] of type [" + contentType() + "]",
-                   e
+                   "failed to parse field [" + fieldType().name() + "] of type [" + contentType() + "]", e
                );
            }
        });
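To illustrate the new `parse()` guard just shown: against the `locations` index from the YAML test above, a document that supplies a value for a script-mapped field would be rejected (a hypothetical request; the exact response shape is not shown in the commit):

# Expected to fail: location_from_source is generated by an index-time script,
# so supplying it directly triggers
# "Cannot index data directly into a field with a [script] parameter".
POST locations/_doc
{
  "timestamp": "1998-04-30T14:32:00-05:00",
  "location": { "lat": 13.5, "lon": 34.89 },
  "location_from_source": { "lat": 13.5, "lon": 34.89 }
}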
@@ -39,11 +39,18 @@ public abstract class AbstractPointGeometryFieldMapper<T> extends AbstractGeomet
    protected AbstractPointGeometryFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                               MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
                                               Explicit<Boolean> ignoreZValue, ParsedPoint nullValue, CopyTo copyTo,
-                                              Parser<T> parser) {
+                                              Parser<? extends T> parser) {
        super(simpleName, mappedFieldType, ignoreMalformed, ignoreZValue, multiFields, copyTo, parser);
        this.nullValue = nullValue;
    }

    protected AbstractPointGeometryFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                                               MultiFields multiFields, CopyTo copyTo,
                                               Parser<? extends T> parser, String onScriptError) {
        super(simpleName, mappedFieldType, multiFields, copyTo, parser, onScriptError);
        this.nullValue = null;
    }

    @Override
    public final boolean parsesArrayValue() {
        return true;
@@ -11,30 +11,39 @@ import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.geo.LatLonGeometry;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.geo.GeoJsonGeometryFormat;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoShapeUtils;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.geo.GeometryFormat;
import org.elasticsearch.common.geo.GeometryParser;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.geometry.Point;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointIndexFieldData;
import org.elasticsearch.index.mapper.GeoPointFieldMapper.ParsedGeoPoint;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptCompiler;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.lookup.FieldValues;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Supplier;

/**

@@ -42,7 +51,7 @@ import java.util.function.Supplier;
 *
 * Uses lucene 6 LatLonPoint encoding
 */
-public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<ParsedGeoPoint> {
+public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoint> {

    public static final String CONTENT_TYPE = "geo_point";

@@ -58,20 +67,27 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
        final Parameter<Boolean> indexed = Parameter.indexParam(m -> builder(m).indexed.get(), true);
        final Parameter<Boolean> hasDocValues = Parameter.docValuesParam(m -> builder(m).hasDocValues.get(), true);
        final Parameter<Boolean> stored = Parameter.storeParam(m -> builder(m).stored.get(), false);
        private final Parameter<Script> script = Parameter.scriptParam(m -> builder(m).script.get());
        private final Parameter<String> onScriptError = Parameter.onScriptErrorParam(m -> builder(m).onScriptError.get(), script);
        final Parameter<Map<String, String>> meta = Parameter.metaParam();

-       public Builder(String name, boolean ignoreMalformedByDefault) {
+       private final ScriptCompiler scriptCompiler;
+
+       public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault) {
            super(name);
            this.ignoreMalformed = ignoreMalformedParam(m -> builder(m).ignoreMalformed.get(), ignoreMalformedByDefault);
            this.nullValue = nullValueParam(
                m -> builder(m).nullValue.get(),
                (n, c, o) -> parseNullValue(o, ignoreZValue.get().value(), ignoreMalformed.get().value()),
                () -> null).acceptsNull();
            this.scriptCompiler = Objects.requireNonNull(scriptCompiler);
            this.script.precludesParameters(nullValue, ignoreMalformed, ignoreZValue);
            addScriptValidation(script, indexed, hasDocValues);
        }

        @Override
        protected List<Parameter<?>> getParameters() {
-           return Arrays.asList(hasDocValues, indexed, stored, ignoreMalformed, ignoreZValue, nullValue, meta);
+           return Arrays.asList(hasDocValues, indexed, stored, ignoreMalformed, ignoreZValue, nullValue, script, onScriptError, meta);
        }

        public Builder docValues(boolean hasDocValues) {

@@ -98,6 +114,17 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
            return point;
        }

        private FieldValues<GeoPoint> scriptValues() {
            if (this.script.get() == null) {
                return null;
            }
            GeoPointFieldScript.Factory factory = scriptCompiler.compile(this.script.get(), GeoPointFieldScript.CONTEXT);
            return factory == null ? null : (lookup, ctx, doc, consumer) -> factory
                .newFactory(name, script.get().getParams(), lookup)
                .newInstance(ctx)
                .runGeoPointForDoc(doc, consumer);
        }

        @Override
        public FieldMapper build(ContentPath contentPath) {
            Parser<ParsedGeoPoint> geoParser = new PointParser<>(

@@ -110,17 +137,28 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
                (ParsedGeoPoint) nullValue.get(),
                ignoreZValue.get().value(),
                ignoreMalformed.get().value());
-           GeoPointFieldType ft
-               = new GeoPointFieldType(buildFullName(contentPath), indexed.get(), stored.get(), hasDocValues.get(), geoParser, meta.get());
+           GeoPointFieldType ft = new GeoPointFieldType(
+               buildFullName(contentPath),
+               indexed.get(),
+               stored.get(),
+               hasDocValues.get(),
+               geoParser,
+               scriptValues(),
+               meta.get());
+           if (this.script.get() == null) {
                return new GeoPointFieldMapper(name, ft, multiFieldsBuilder.build(this, contentPath),
                    copyTo.build(), geoParser, this);
+           }
+           return new GeoPointFieldMapper(name, ft, geoParser, this);
        }

    }

-   public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n, IGNORE_MALFORMED_SETTING.get(c.getSettings())));
+   public static TypeParser PARSER
+       = new TypeParser((n, c) -> new Builder(n, c.scriptCompiler(), IGNORE_MALFORMED_SETTING.get(c.getSettings())));

    private final Builder builder;
    private final FieldValues<GeoPoint> scriptValues;

    public GeoPointFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                               MultiFields multiFields, CopyTo copyTo,

@@ -130,15 +168,23 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
            builder.ignoreMalformed.get(), builder.ignoreZValue.get(), builder.nullValue.get(),
            copyTo, parser);
        this.builder = builder;
        this.scriptValues = null;
    }

    public GeoPointFieldMapper(String simpleName, MappedFieldType mappedFieldType,
                               Parser<ParsedGeoPoint> parser, Builder builder) {
        super(simpleName, mappedFieldType, MultiFields.empty(), CopyTo.empty(), parser, builder.onScriptError.get());
        this.builder = builder;
        this.scriptValues = builder.scriptValues();
    }

    @Override
    public FieldMapper.Builder getMergeBuilder() {
-       return new Builder(simpleName(), builder.ignoreMalformed.getDefaultValue().value()).init(this);
+       return new Builder(simpleName(), builder.scriptCompiler, builder.ignoreMalformed.getDefaultValue().value()).init(this);
    }

    @Override
-   protected void index(ParseContext context, ParsedGeoPoint geometry) throws IOException {
+   protected void index(ParseContext context, GeoPoint geometry) throws IOException {
        if (fieldType().isSearchable()) {
            context.doc().add(new LatLonPoint(fieldType().name(), geometry.lat(), geometry.lon()));
        }

@@ -154,6 +200,17 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
            multiFields.parse(this, context.createExternalValueContext(geometry.geohash()));
    }

    @Override
    protected void indexScriptValues(SearchLookup searchLookup, LeafReaderContext readerContext, int doc, ParseContext parseContext) {
        this.scriptValues.valuesForDoc(searchLookup, readerContext, doc, point -> {
            try {
                index(parseContext, point);
            } catch (IOException e) {
                throw new UncheckedIOException(e); // only thrown by MultiFields which is always null
            }
        });
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;

@@ -161,13 +218,17 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed

    public static class GeoPointFieldType extends AbstractGeometryFieldType implements GeoShapeQueryable {

        private static final GeometryParser PARSER = new GeometryParser(true, true, true);
        private final FieldValues<GeoPoint> scriptValues;

        private GeoPointFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
-                                 Parser<?> parser, Map<String, String> meta) {
+                                 Parser<ParsedGeoPoint> parser, FieldValues<GeoPoint> scriptValues, Map<String, String> meta) {
            super(name, indexed, stored, hasDocValues, true, parser, meta);
            this.scriptValues = scriptValues;
        }

        public GeoPointFieldType(String name) {
-           this(name, true, false, true, null, Collections.emptyMap());
+           this(name, true, false, true, null, null, Collections.emptyMap());
        }

        @Override

@@ -175,6 +236,19 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
            return CONTENT_TYPE;
        }

        @Override
        public ValueFetcher valueFetcher(SearchExecutionContext context, String format) {
            if (scriptValues == null) {
                return super.valueFetcher(context, format);
            }
            String geoFormat = format != null ? format : GeoJsonGeometryFormat.NAME;
            GeometryFormat<Geometry> geometryFormat = PARSER.geometryFormat(geoFormat);
            return FieldValues.valueFetcher(scriptValues, v -> {
                GeoPoint p = (GeoPoint) v;
                return geometryFormat.toXContentAsObject(new Point(p.lon(), p.lat()));
            }, context);
        }

        @Override
        public Query geoShapeQuery(Geometry shape, String fieldName, ShapeRelation relation, SearchExecutionContext context) {
            final LatLonGeometry[] luceneGeometries = GeoShapeUtils.toLuceneGeometry(fieldName, context, shape, relation);

@@ -270,4 +344,5 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<Parsed
            return super.hashCode();
        }
    }

}
@@ -9,10 +9,13 @@
package org.elasticsearch.script;

import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.search.lookup.SearchLookup;

import java.util.Map;
import java.util.function.Consumer;

import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
import static org.apache.lucene.geo.GeoEncodingUtils.encodeLongitude;

@@ -39,6 +42,17 @@ public abstract class GeoPointFieldScript extends AbstractLongFieldScript {
        super(fieldName, params, searchLookup, ctx);
    }

    public void runGeoPointForDoc(int doc, Consumer<GeoPoint> consumer) {
        runForDoc(doc);
        GeoPoint point = new GeoPoint();
        for (int i = 0; i < count(); i++) {
            final int lat = (int) (values()[i] >>> 32);
            final int lon = (int) (values()[i] & 0xFFFFFFFF);
            point.reset(GeoEncodingUtils.decodeLatitude(lat), GeoEncodingUtils.decodeLongitude(lon));
            consumer.accept(point);
        }
    }

    protected void emit(double lat, double lon) {
        int latitudeEncoded = encodeLatitude(lat);
        int longitudeEncoded = encodeLongitude(lon);
@@ -111,7 +111,10 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
            fieldType = new NumberFieldMapper.Builder(fieldName, NumberFieldMapper.NumberType.BYTE, ScriptCompiler.NONE, false, true)
                .docValues(docValues).build(contentPath).fieldType();
        } else if (type.equals("geo_point")) {
-           fieldType = new GeoPointFieldMapper.Builder(fieldName, false).docValues(docValues).build(contentPath).fieldType();
+           fieldType = new GeoPointFieldMapper.Builder(fieldName, ScriptCompiler.NONE, false)
+               .docValues(docValues)
+               .build(contentPath)
+               .fieldType();
        } else if (type.equals("binary")) {
            fieldType = new BinaryFieldMapper.Builder(fieldName, docValues).build(contentPath).fieldType();
        } else {
@@ -313,7 +313,7 @@ public class GeoPointFieldMapperTests extends MapperTestCase {
            MapperParsingException.class,
            () -> mapper.parse(source(b -> b.field("field", "1234.333")))
        );
-       assertThat(e.getMessage(), containsString("Failed to parse field [field] of type [geo_point]"));
+       assertThat(e.getMessage(), containsString("failed to parse field [field] of type [geo_point]"));
        assertThat(e.getRootCause().getMessage(), containsString("unsupported symbol [.] in geohash [1234.333]"));
    }

@@ -361,4 +361,34 @@ public class GeoPointFieldMapperTests extends MapperTestCase {
        assumeFalse("Test implemented in a follow up", true);
        return null;
    }

    public void testScriptAndPrecludedParameters() {
        {
            Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
                b.field("type", "geo_point");
                b.field("script", "test");
                b.field("ignore_z_value", "true");
            })));
            assertThat(e.getMessage(),
                equalTo("Failed to parse mapping: Field [ignore_z_value] cannot be set in conjunction with field [script]"));
        }
        {
            Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
                b.field("type", "geo_point");
                b.field("script", "test");
                b.field("null_value", "POINT (1 1)");
            })));
            assertThat(e.getMessage(),
                equalTo("Failed to parse mapping: Field [null_value] cannot be set in conjunction with field [script]"));
        }
        {
            Exception e = expectThrows(MapperParsingException.class, () -> createDocumentMapper(fieldMapping(b -> {
                b.field("type", "long");
                b.field("script", "test");
                b.field("ignore_malformed", "true");
            })));
            assertThat(e.getMessage(),
                equalTo("Failed to parse mapping: Field [ignore_malformed] cannot be set in conjunction with field [script]"));
        }
    }
}
@@ -8,6 +8,8 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.script.ScriptCompiler;

import java.io.IOException;
import java.util.List;
import java.util.Map;

@@ -15,7 +17,8 @@ import java.util.Map;
public class GeoPointFieldTypeTests extends FieldTypeTestCase {

    public void testFetchSourceValue() throws IOException {
-       MappedFieldType mapper = new GeoPointFieldMapper.Builder("field", false).build(new ContentPath()).fieldType();
+       MappedFieldType mapper
+           = new GeoPointFieldMapper.Builder("field", ScriptCompiler.NONE, false).build(new ContentPath()).fieldType();

        Map<String, Object> jsonPoint = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1));
        Map<String, Object> otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0));
@@ -0,0 +1,90 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.search.lookup.SearchLookup;

import java.util.Map;
import java.util.function.Consumer;

public class GeoPointScriptMapperTests extends MapperScriptTestCase<GeoPointFieldScript.Factory> {

    private static GeoPointFieldScript.Factory factory(Consumer<GeoPointFieldScript> executor) {
        return new GeoPointFieldScript.Factory() {
            @Override
            public GeoPointFieldScript.LeafFactory newFactory(String fieldName, Map<String, Object> params, SearchLookup searchLookup) {
                return new GeoPointFieldScript.LeafFactory() {
                    @Override
                    public GeoPointFieldScript newInstance(LeafReaderContext ctx) {
                        return new GeoPointFieldScript(fieldName, params, searchLookup, ctx) {
                            @Override
                            public void execute() {
                                executor.accept(this);
                            }
                        };
                    }
                };
            }
        };
    }

    @Override
    protected String type() {
        return "geo_point";
    }

    @Override
    protected GeoPointFieldScript.Factory serializableScript() {
        return factory(s -> {});
    }

    @Override
    protected GeoPointFieldScript.Factory errorThrowingScript() {
        return factory(s -> {
            throw new UnsupportedOperationException("Oops");
        });
    }

    @Override
    protected GeoPointFieldScript.Factory singleValueScript() {
        return factory(s -> s.emit(1));
    }

    @Override
    protected GeoPointFieldScript.Factory multipleValuesScript() {
        return factory(s -> {
            s.emit(1);
            s.emit(2);
        });
    }

    @Override
    protected void assertMultipleValues(IndexableField[] fields) {
        assertEquals(4, fields.length);
        assertEquals("LatLonPoint <field:0.0,8.381903171539307E-8>", fields[0].toString());
        assertEquals("LatLonDocValuesField <field:0.0,8.381903171539307E-8>", fields[1].toString());
        assertEquals("LatLonPoint <field:0.0,1.6763806343078613E-7>", fields[2].toString());
        assertEquals("LatLonDocValuesField <field:0.0,1.6763806343078613E-7>", fields[3].toString());
    }

    @Override
    protected void assertDocValuesDisabled(IndexableField[] fields) {
        assertEquals(1, fields.length);
        assertEquals("LatLonPoint <field:0.0,8.381903171539307E-8>", fields[0].toString());
    }

    @Override
    protected void assertIndexDisabled(IndexableField[] fields) {
        assertEquals(1, fields.length);
        assertEquals("LatLonDocValuesField <field:0.0,8.381903171539307E-8>", fields[0].toString());
    }
}
@@ -145,7 +145,7 @@ public abstract class CartesianFieldMapperTests extends MapperTestCase {
        MapperParsingException e = expectThrows(MapperParsingException.class,
            () -> mapper2.parse(source(b -> b.field(FIELD_NAME, "POINT (2000.1 305.6 34567.33)")))
        );
-       assertThat(e.getMessage(), containsString("Failed to parse field [" + FIELD_NAME + "] of type"));
+       assertThat(e.getMessage(), containsString("failed to parse field [" + FIELD_NAME + "] of type"));
        assertThat(e.getRootCause().getMessage(),
            containsString("found Z value [34567.33] but [ignore_z_value] parameter is [false]"));
    }