Properly handle multi fields in block loaders with synthetic source enabled (#127483)
parent ac6c7a958d
commit 0c1b3acee2
23 changed files with 538 additions and 305 deletions
@@ -378,7 +378,8 @@ public class ScaledFloatFieldMapper extends FieldMapper {
         if (hasDocValues() && (blContext.fieldExtractPreference() != FieldExtractPreference.STORED || isSyntheticSource)) {
             return new BlockDocValuesReader.DoublesBlockLoader(name(), l -> l / scalingFactor);
         }
-        if (isSyntheticSource) {
+        // Multi fields don't have fallback synthetic source.
+        if (isSyntheticSource && blContext.parentField(name()) == null) {
             return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) {
                 @Override
                 public Builder builder(BlockFactory factory, int expectedCount) {
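Note: every mapper-side hunk in this commit applies the same guard. blContext.parentField(name()) returns the enclosing field for a multi field (for example "parent" for "parent.mf") and null for a top-level field, and only top-level fields may use the fallback synthetic source block loader, because multi-field values are not recorded in _ignored_source. A minimal sketch of the selection order, with hypothetical helper names (docValuesLoader, fallbackSyntheticSourceLoader, sourceLoader) standing in for the real mapper hierarchy:

    // Simplified sketch; the real logic lives in each FieldMapper's blockLoader(...)
    // and the doc-values condition varies per mapper (see the ScaledFloat hunk above).
    BlockLoader pickBlockLoader(String field, boolean hasDocValues, boolean isSyntheticSource, BlockLoaderContext blContext) {
        if (hasDocValues) {
            return docValuesLoader(field);
        }
        // parentField(field) != null means `field` is a multi field; its values never
        // reach _ignored_source, so the fallback loader would read nothing.
        if (isSyntheticSource && blContext.parentField(field) == null) {
            return fallbackSyntheticSourceLoader(field);
        }
        return sourceLoader(field);
    }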
@@ -350,7 +350,8 @@ public class BooleanFieldMapper extends FieldMapper {
             return new BlockDocValuesReader.BooleansBlockLoader(name());
         }

-        if (isSyntheticSource) {
+        // Multi fields don't have fallback synthetic source.
+        if (isSyntheticSource && blContext.parentField(name()) == null) {
             return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) {
                 @Override
                 public Builder builder(BlockFactory factory, int expectedCount) {
@@ -948,7 +948,8 @@ public final class DateFieldMapper extends FieldMapper {
             return new BlockDocValuesReader.LongsBlockLoader(name());
         }

-        if (isSyntheticSource) {
+        // Multi fields don't have fallback synthetic source.
+        if (isSyntheticSource && blContext.parentField(name()) == null) {
             return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) {
                 @Override
                 public Builder builder(BlockFactory factory, int expectedCount) {
@@ -316,7 +316,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoint> {

     /**
      * Parser that pretends to be the main document parser, but exposes the provided geohash regardless of how the geopoint was provided
-     * in the incoming document. We rely on the fact that consumers are only ever call {@link XContentParser#textOrNull()} and never
+     * in the incoming document. We rely on the fact that consumers only ever read text from the parser and never
      * advance tokens, which is explicitly disallowed by this parser.
      */
     static class GeoHashMultiFieldParser extends FilterXContentParserWrapper {
@@ -332,6 +332,11 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoint> {
             return value;
         }

+        @Override
+        public String text() throws IOException {
+            return value;
+        }
+
         @Override
         public Token currentToken() {
             return Token.VALUE_STRING;
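Note: the javadoc fix and the new text() override belong together: GeoHashMultiFieldParser previously answered only textOrNull(), so a consumer calling text() fell through to the wrapped parser and could read the wrong value. A toy illustration of the invariant (not the Elasticsearch class itself): a wrapper that pins the current token to one string must answer every text accessor consistently.

    // Toy sketch assuming a FilterXContentParserWrapper-style delegate.
    class FixedStringParser extends FilterXContentParserWrapper {
        private final String value;

        FixedStringParser(XContentParser delegate, String value) {
            super(delegate);
            this.value = value;
        }

        @Override
        public String textOrNull() {
            return value;
        }

        @Override
        public String text() {
            return value; // must agree with textOrNull(), as in the hunk above
        }

        @Override
        public Token currentToken() {
            return Token.VALUE_STRING;
        }
    }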
@@ -545,8 +550,9 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoint> {
         // So we have two subcases:
         // - doc_values are enabled - _ignored_source field does not exist since we have doc_values. We will use
         // blockLoaderFromSource which reads "native" synthetic source.
-        // - doc_values are disabled - we know that _ignored_source field is present and use a special block loader.
-        if (isSyntheticSource && hasDocValues() == false) {
+        // - doc_values are disabled - we know that _ignored_source field is present and use a special block loader unless it's a multi
+        // field.
+        if (isSyntheticSource && hasDocValues() == false && blContext.parentField(name()) == null) {
             return blockLoaderFromFallbackSyntheticSource(blContext);
         }

@@ -467,7 +467,8 @@ public class IpFieldMapper extends FieldMapper {
             return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name());
         }

-        if (isSyntheticSource) {
+        // Multi fields don't have fallback synthetic source.
+        if (isSyntheticSource && blContext.parentField(name()) == null) {
             return blockLoaderFromFallbackSyntheticSource(blContext);
         }

@@ -755,7 +755,8 @@ public final class KeywordFieldMapper extends FieldMapper {
             return new BlockStoredFieldsReader.BytesFromBytesRefsBlockLoader(name());
         }

-        if (isSyntheticSource) {
+        // Multi fields don't have fallback synthetic source.
+        if (isSyntheticSource && blContext.parentField(name()) == null) {
             return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) {
                 @Override
                 public Builder builder(BlockFactory factory, int expectedCount) {
@@ -1973,7 +1973,8 @@ public class NumberFieldMapper extends FieldMapper {
             return type.blockLoaderFromDocValues(name());
         }

-        if (isSyntheticSource) {
+        // Multi fields don't have fallback synthetic source.
+        if (isSyntheticSource && blContext.parentField(name()) == null) {
             return type.blockLoaderFromFallbackSyntheticSource(name(), nullValue, coerce);
         }

@@ -35,19 +35,19 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
         var values = extractedFieldValues.values();

         var nullValue = switch (fieldMapping.get("null_value")) {
-            case String s -> convert(s, null);
+            case String s -> convert(s, null, false);
             case null -> null;
             default -> throw new IllegalStateException("Unexpected null_value format");
         };

         if (params.preference() == MappedFieldType.FieldExtractPreference.DOC_VALUES && hasDocValues(fieldMapping, true)) {
             if (values instanceof List<?> == false) {
-                var point = convert(values, nullValue);
+                var point = convert(values, nullValue, testContext.isMultifield());
                 return point != null ? point.getEncoded() : null;
             }

             var resultList = ((List<Object>) values).stream()
-                .map(v -> convert(v, nullValue))
+                .map(v -> convert(v, nullValue, testContext.isMultifield()))
                 .filter(Objects::nonNull)
                 .map(GeoPoint::getEncoded)
                 .sorted()
@@ -55,8 +55,9 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
             return maybeFoldList(resultList);
         }

+        // stored source is used
         if (params.syntheticSource() == false) {
-            return exactValuesFromSource(values, nullValue);
+            return exactValuesFromSource(values, nullValue, false);
         }

         // Usually implementation of block loader from source adjusts values read from source
@@ -67,25 +68,25 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
         // That is unless "synthetic_source_keep" forces fallback synthetic source again.

         if (testContext.forceFallbackSyntheticSource()) {
-            return exactValuesFromSource(values, nullValue);
+            return exactValuesFromSource(values, nullValue, false);
         }

         String syntheticSourceKeep = (String) fieldMapping.getOrDefault("synthetic_source_keep", "none");
         if (syntheticSourceKeep.equals("all")) {
-            return exactValuesFromSource(values, nullValue);
+            return exactValuesFromSource(values, nullValue, false);
         }
         if (syntheticSourceKeep.equals("arrays") && extractedFieldValues.documentHasObjectArrays()) {
-            return exactValuesFromSource(values, nullValue);
+            return exactValuesFromSource(values, nullValue, false);
         }

         // synthetic source and doc_values are present
         if (hasDocValues(fieldMapping, true)) {
             if (values instanceof List<?> == false) {
-                return toWKB(normalize(convert(values, nullValue)));
+                return toWKB(normalize(convert(values, nullValue, false)));
             }

             var resultList = ((List<Object>) values).stream()
-                .map(v -> convert(v, nullValue))
+                .map(v -> convert(v, nullValue, false))
                 .filter(Objects::nonNull)
                 .sorted(Comparator.comparingLong(GeoPoint::getEncoded))
                 .map(p -> toWKB(normalize(p)))
@@ -94,16 +95,20 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
         }

         // synthetic source but no doc_values so using fallback synthetic source
-        return exactValuesFromSource(values, nullValue);
+        return exactValuesFromSource(values, nullValue, false);
     }

     @SuppressWarnings("unchecked")
-    private Object exactValuesFromSource(Object value, GeoPoint nullValue) {
+    private Object exactValuesFromSource(Object value, GeoPoint nullValue, boolean needsMultifieldAdjustment) {
         if (value instanceof List<?> == false) {
-            return toWKB(convert(value, nullValue));
+            return toWKB(convert(value, nullValue, needsMultifieldAdjustment));
         }

-        var resultList = ((List<Object>) value).stream().map(v -> convert(v, nullValue)).filter(Objects::nonNull).map(this::toWKB).toList();
+        var resultList = ((List<Object>) value).stream()
+            .map(v -> convert(v, nullValue, needsMultifieldAdjustment))
+            .filter(Objects::nonNull)
+            .map(this::toWKB)
+            .toList();
         return maybeFoldList(resultList);
     }

@@ -163,14 +168,17 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
     }

     @SuppressWarnings("unchecked")
-    private GeoPoint convert(Object value, GeoPoint nullValue) {
+    private GeoPoint convert(Object value, GeoPoint nullValue, boolean needsMultifieldAdjustment) {
         if (value == null) {
-            return nullValue;
+            if (nullValue == null) {
+                return null;
+            }
+            return possiblyAdjustMultifieldValue(nullValue, needsMultifieldAdjustment);
         }

         if (value instanceof String s) {
             try {
-                return new GeoPoint(s);
+                return possiblyAdjustMultifieldValue(new GeoPoint(s), needsMultifieldAdjustment);
             } catch (Exception e) {
                 return null;
             }
@@ -180,9 +188,9 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
             if (m.get("type") != null) {
                 var coordinates = (List<Double>) m.get("coordinates");
                 // Order is GeoJSON is lon,lat
-                return new GeoPoint(coordinates.get(1), coordinates.get(0));
+                return possiblyAdjustMultifieldValue(new GeoPoint(coordinates.get(1), coordinates.get(0)), needsMultifieldAdjustment);
             } else {
-                return new GeoPoint((Double) m.get("lat"), (Double) m.get("lon"));
+                return possiblyAdjustMultifieldValue(new GeoPoint((Double) m.get("lat"), (Double) m.get("lon")), needsMultifieldAdjustment);
             }
         }

@@ -190,6 +198,20 @@ public class GeoPointFieldBlockLoaderTests extends BlockLoaderTestCase {
         return null;
     }

+    private GeoPoint possiblyAdjustMultifieldValue(GeoPoint point, boolean isMultifield) {
+        // geo_point multifields are parsed from a geohash representation of the original point (GeoPointFieldMapper#index)
+        // and it's not exact.
+        // So if this is a multifield we need another adjustment here.
+        // Note that this does not apply to block loader from source because in this case we parse raw original values.
+        // Same thing happens with synthetic source since it is generated from the parent field data that didn't go through multi field
+        // parsing logic.
+        if (isMultifield) {
+            return point.resetFromString(point.geohash());
+        }
+
+        return point;
+    }
+
     private GeoPoint normalize(GeoPoint point) {
         if (point == null) {
             return null;
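Note: the adjustment exists because GeoPointFieldMapper#index hands multi fields the geohash string of the parsed point, and a geohash round trip is lossy: decoding snaps the point to its geohash cell rather than restoring the exact coordinates. A minimal illustration using the same GeoPoint calls the test relies on:

    // The expected value for a multi field must go through the same lossy
    // geohash round trip that indexing applied (coordinates are illustrative).
    GeoPoint original = new GeoPoint(52.520008, 13.404954);
    GeoPoint roundTripped = new GeoPoint().resetFromString(original.geohash());
    // roundTripped is close to, but generally not equal to, original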
@@ -9,51 +9,108 @@

 package org.elasticsearch.index.mapper.blockloader;

+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+
+import org.elasticsearch.datageneration.DocumentGenerator;
 import org.elasticsearch.datageneration.FieldType;
+import org.elasticsearch.datageneration.MappingGenerator;
+import org.elasticsearch.datageneration.Template;
 import org.elasticsearch.datageneration.datasource.DataSourceHandler;
 import org.elasticsearch.datageneration.datasource.DataSourceRequest;
 import org.elasticsearch.datageneration.datasource.DataSourceResponse;
-import org.elasticsearch.datageneration.datasource.DefaultMappingParametersHandler;
 import org.elasticsearch.index.mapper.BlockLoaderTestCase;
+import org.elasticsearch.index.mapper.BlockLoaderTestRunner;
+import org.elasticsearch.index.mapper.MapperServiceTestCase;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentType;

-import java.util.HashMap;
+import java.io.IOException;
 import java.util.List;
 import java.util.Map;

-public class TextFieldWithParentBlockLoaderTests extends BlockLoaderTestCase {
-    public TextFieldWithParentBlockLoaderTests(Params params) {
-        // keyword because we need a keyword parent field
-        super(FieldType.KEYWORD.toString(), List.of(new DataSourceHandler() {
+import static org.elasticsearch.index.mapper.BlockLoaderTestCase.buildSpecification;
+import static org.elasticsearch.index.mapper.BlockLoaderTestCase.hasDocValues;
+
+public class TextFieldWithParentBlockLoaderTests extends MapperServiceTestCase {
+    private final BlockLoaderTestCase.Params params;
+    private final BlockLoaderTestRunner runner;
+
+    @ParametersFactory(argumentFormatting = "preference=%s")
+    public static List<Object[]> args() {
+        return BlockLoaderTestCase.args();
+    }
+
+    public TextFieldWithParentBlockLoaderTests(BlockLoaderTestCase.Params params) {
+        this.params = params;
+        this.runner = new BlockLoaderTestRunner(params);
+    }
+
+    // This is similar to BlockLoaderTestCase#testBlockLoaderOfMultiField but has customizations required to properly test the case
+    // of text multi field in a keyword field.
+    public void testBlockLoaderOfParentField() throws IOException {
+        var template = new Template(Map.of("parent", new Template.Leaf("parent", FieldType.KEYWORD.toString())));
+        var specification = buildSpecification(List.of(new DataSourceHandler() {
             @Override
             public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceRequest.LeafMappingParametersGenerator request) {
-                assert request.fieldType().equals(FieldType.KEYWORD.toString());
+                // This is a bit tricky meta-logic.
+                // We want to customize mapping but to do this we need the mapping for the same field type
+                // so we use name to untangle this.
+                if (request.fieldName().equals("parent") == false) {
+                    return null;
+                }

-                // We need to force multi field generation
                 return new DataSourceResponse.LeafMappingParametersGenerator(() -> {
-                    var defaultSupplier = DefaultMappingParametersHandler.keywordMapping(
-                        request,
-                        DefaultMappingParametersHandler.commonMappingParameters()
-                    );
-                    var mapping = defaultSupplier.get();
-                    // we don't need this here
-                    mapping.remove("copy_to");
+                    var dataSource = request.dataSource();
+
+                    var keywordParentMapping = dataSource.get(
+                        new DataSourceRequest.LeafMappingParametersGenerator(
+                            dataSource,
+                            "_field",
+                            FieldType.KEYWORD.toString(),
+                            request.eligibleCopyToFields(),
+                            request.dynamicMapping()
+                        )
+                    ).mappingGenerator().get();
+
+                    var textMultiFieldMapping = dataSource.get(
+                        new DataSourceRequest.LeafMappingParametersGenerator(
+                            dataSource,
+                            "_field",
+                            FieldType.TEXT.toString(),
+                            request.eligibleCopyToFields(),
+                            request.dynamicMapping()
+                        )
+                    ).mappingGenerator().get();
+
+                    // we don't need this here
+                    keywordParentMapping.remove("copy_to");

-                    var textMultiFieldMappingSupplier = DefaultMappingParametersHandler.textMapping(request, new HashMap<>());
-                    var textMultiFieldMapping = textMultiFieldMappingSupplier.get();
                     textMultiFieldMapping.put("type", "text");
                     textMultiFieldMapping.remove("fields");

-                    mapping.put("fields", Map.of("txt", textMultiFieldMapping));
+                    keywordParentMapping.put("fields", Map.of("mf", textMultiFieldMapping));

-                    return mapping;
+                    return keywordParentMapping;
                 });
             }
-        }), params);
+        }));
+
+        var mapping = new MappingGenerator(specification).generate(template);
+        var fieldMapping = mapping.lookup().get("parent");
+
+        var document = new DocumentGenerator(specification).generate(template, mapping);
+        var fieldValue = document.get("parent");
+
+        Object expected = expected(fieldMapping, fieldValue, new BlockLoaderTestCase.TestContext(false, true));
+        var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(mapping.raw());
+        var mapperService = params.syntheticSource()
+            ? createSytheticSourceMapperService(mappingXContent)
+            : createMapperService(mappingXContent);
+
+        runner.runTest(mapperService, document, expected, "parent.mf");
     }

-    @Override
     @SuppressWarnings("unchecked")
-    protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
+    private Object expected(Map<String, Object> fieldMapping, Object value, BlockLoaderTestCase.TestContext testContext) {
         assert fieldMapping.containsKey("fields");

         Object normalizer = fieldMapping.get("normalizer");
@@ -66,12 +123,7 @@ public class TextFieldWithParentBlockLoaderTests extends BlockLoaderTestCase {
         }

         // we are using block loader of the text field itself
-        var textFieldMapping = (Map<String, Object>) ((Map<String, Object>) fieldMapping.get("fields")).get("txt");
+        var textFieldMapping = (Map<String, Object>) ((Map<String, Object>) fieldMapping.get("fields")).get("mf");
         return TextFieldBlockLoaderTests.expectedValue(textFieldMapping, value, params, testContext);
     }
-
-    @Override
-    protected String blockLoaderFieldName(String originalName) {
-        return originalName + ".txt";
-    }
 }
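Note: the structural change in this test is delegation instead of inheritance: rather than subclassing BlockLoaderTestCase and overriding blockLoaderFieldName() to redirect to the ".txt" sub-field, the test now builds its own mapper service and hands the multi-field name directly to a BlockLoaderTestRunner. In outline:

    // The composed shape used above: the runner owns the load-and-assert logic,
    // and the test chooses which field's block loader is exercised.
    var runner = new BlockLoaderTestRunner(params);
    runner.runTest(mapperService, document, expected, "parent.mf");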
@@ -104,6 +104,7 @@ public class MappingGenerator {
         var mappingParametersGenerator = specification.dataSource()
             .get(
                 new DataSourceRequest.LeafMappingParametersGenerator(
+                    specification.dataSource(),
                     fieldName,
                     leaf.type(),
                     context.eligibleCopyToDestinations(),
@@ -199,6 +199,7 @@ public interface DataSourceRequest<TResponse extends DataSourceResponse> {
     }

     record LeafMappingParametersGenerator(
+        DataSource dataSource,
         String fieldName,
         String fieldType,
         Set<String> eligibleCopyToFields,
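Note: threading the DataSource through the request record is what enables recursive mapping generation: while answering one LeafMappingParametersGenerator request, a handler can issue a second request for another field type and splice the result in as a multi field. The pattern, as used by the tests in this commit ("_field" is a scratch field name that must differ from the field being customized):

    var nested = request.dataSource().get(
        new DataSourceRequest.LeafMappingParametersGenerator(
            request.dataSource(),
            "_field",
            FieldType.TEXT.toString(),
            request.eligibleCopyToFields(),
            request.dynamicMapping()
        )
    ).mappingGenerator().get();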
@@ -36,34 +36,27 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
             return null;
         }

-        var map = commonMappingParameters();
-        if (ESTestCase.randomBoolean()) {
-            map.put(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, ESTestCase.randomFrom("none", "arrays", "all"));
-        }
-
         return new DataSourceResponse.LeafMappingParametersGenerator(switch (fieldType) {
-            case KEYWORD -> keywordMapping(request, map);
-            case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> numberMapping(map, fieldType);
-            case SCALED_FLOAT -> scaledFloatMapping(map);
-            case COUNTED_KEYWORD -> plain(Map.of("index", ESTestCase.randomBoolean()));
-            case BOOLEAN -> booleanMapping(map);
-            case DATE -> dateMapping(map);
-            case GEO_POINT -> geoPointMapping(map);
-            case TEXT -> textMapping(request, new HashMap<>());
-            case IP -> ipMapping(map);
-            case CONSTANT_KEYWORD -> constantKeywordMapping(new HashMap<>());
-            case WILDCARD -> wildcardMapping(new HashMap<>());
+            case KEYWORD -> keywordMapping(request);
+            case LONG, INTEGER, SHORT, BYTE, DOUBLE, FLOAT, HALF_FLOAT, UNSIGNED_LONG -> numberMapping(fieldType);
+            case SCALED_FLOAT -> scaledFloatMapping();
+            case COUNTED_KEYWORD -> countedKeywordMapping();
+            case BOOLEAN -> booleanMapping();
+            case DATE -> dateMapping();
+            case GEO_POINT -> geoPointMapping();
+            case TEXT -> textMapping(request);
+            case IP -> ipMapping();
+            case CONSTANT_KEYWORD -> constantKeywordMapping();
+            case WILDCARD -> wildcardMapping();
         });
     }

-    private Supplier<Map<String, Object>> plain(Map<String, Object> injected) {
-        return () -> injected;
-    }
-
-    private Supplier<Map<String, Object>> numberMapping(Map<String, Object> injected, FieldType fieldType) {
+    private Supplier<Map<String, Object>> numberMapping(FieldType fieldType) {
         return () -> {
+            var mapping = commonMappingParameters();
+
             if (ESTestCase.randomBoolean()) {
-                injected.put("ignore_malformed", ESTestCase.randomBoolean());
+                mapping.put("ignore_malformed", ESTestCase.randomBoolean());
             }
             if (ESTestCase.randomDouble() <= 0.2) {
                 Number value = switch (fieldType) {
@@ -77,18 +70,17 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
                     default -> throw new IllegalStateException("Unexpected field type");
                 };

-                injected.put("null_value", value);
+                mapping.put("null_value", value);
             }

-            return injected;
+            return mapping;
         };
     }

-    public static Supplier<Map<String, Object>> keywordMapping(
-        DataSourceRequest.LeafMappingParametersGenerator request,
-        Map<String, Object> injected
-    ) {
+    private Supplier<Map<String, Object>> keywordMapping(DataSourceRequest.LeafMappingParametersGenerator request) {
         return () -> {
+            var mapping = commonMappingParameters();
+
             // Inject copy_to sometimes but reflect that it is not widely used in reality.
             // We only add copy_to to keywords because we get into trouble with numeric fields that are copied to dynamic fields.
             // If first copied value is numeric, dynamic field is created with numeric field type and then copy of text values fail.
@@ -100,69 +92,79 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
                     .collect(Collectors.toSet());

                 if (options.isEmpty() == false) {
-                    injected.put("copy_to", ESTestCase.randomFrom(options));
+                    mapping.put("copy_to", ESTestCase.randomFrom(options));
                 }
             }

             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("ignore_above", ESTestCase.randomIntBetween(1, 100));
+                mapping.put("ignore_above", ESTestCase.randomIntBetween(1, 100));
             }
             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("null_value", ESTestCase.randomAlphaOfLengthBetween(0, 10));
+                mapping.put("null_value", ESTestCase.randomAlphaOfLengthBetween(0, 10));
             }

-            return injected;
+            return mapping;
         };
     }

-    private Supplier<Map<String, Object>> scaledFloatMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> scaledFloatMapping() {
         return () -> {
-            injected.put("scaling_factor", ESTestCase.randomFrom(10, 1000, 100000, 100.5));
+            var mapping = commonMappingParameters();
+
+            mapping.put("scaling_factor", ESTestCase.randomFrom(10, 1000, 100000, 100.5));
+
             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("null_value", ESTestCase.randomDouble());
+                mapping.put("null_value", ESTestCase.randomDouble());
             }

             if (ESTestCase.randomBoolean()) {
-                injected.put("ignore_malformed", ESTestCase.randomBoolean());
+                mapping.put("ignore_malformed", ESTestCase.randomBoolean());
             }

-            return injected;
+            return mapping;
         };
     }

-    private Supplier<Map<String, Object>> booleanMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> countedKeywordMapping() {
+        return () -> Map.of("index", ESTestCase.randomBoolean());
+    }
+
+    private Supplier<Map<String, Object>> booleanMapping() {
         return () -> {
+            var mapping = commonMappingParameters();
+
             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("null_value", ESTestCase.randomFrom(true, false, "true", "false"));
+                mapping.put("null_value", ESTestCase.randomFrom(true, false, "true", "false"));
             }

             if (ESTestCase.randomBoolean()) {
-                injected.put("ignore_malformed", ESTestCase.randomBoolean());
+                mapping.put("ignore_malformed", ESTestCase.randomBoolean());
             }

-            return injected;
+            return mapping;
         };
     }

     // just a custom format, specific format does not matter
     private static final String FORMAT = "yyyy_MM_dd_HH_mm_ss_n";

-    private Supplier<Map<String, Object>> dateMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> dateMapping() {
         return () -> {
+            var mapping = commonMappingParameters();
+
             String format = null;
             if (ESTestCase.randomBoolean()) {
                 format = FORMAT;
-                injected.put("format", format);
+                mapping.put("format", format);
             }

             if (ESTestCase.randomDouble() <= 0.2) {
                 var instant = ESTestCase.randomInstantBetween(Instant.parse("2300-01-01T00:00:00Z"), Instant.parse("2350-01-01T00:00:00Z"));

                 if (format == null) {
-                    injected.put("null_value", instant.toEpochMilli());
+                    mapping.put("null_value", instant.toEpochMilli());
                 } else {
-                    injected.put(
+                    mapping.put(
                         "null_value",
                         DateTimeFormatter.ofPattern(format, Locale.ROOT).withZone(ZoneId.from(ZoneOffset.UTC)).format(instant)
                     );
@@ -170,82 +172,89 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
             }

             if (ESTestCase.randomBoolean()) {
-                injected.put("ignore_malformed", ESTestCase.randomBoolean());
+                mapping.put("ignore_malformed", ESTestCase.randomBoolean());
             }

-            return injected;
+            return mapping;
         };
     }

-    private Supplier<Map<String, Object>> geoPointMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> geoPointMapping() {
         return () -> {
+            var mapping = commonMappingParameters();
+
             if (ESTestCase.randomDouble() <= 0.2) {
                 var point = GeometryTestUtils.randomPoint(false);
-                injected.put("null_value", WellKnownText.toWKT(point));
+                mapping.put("null_value", WellKnownText.toWKT(point));
             }

             if (ESTestCase.randomBoolean()) {
-                injected.put("ignore_malformed", ESTestCase.randomBoolean());
+                mapping.put("ignore_malformed", ESTestCase.randomBoolean());
             }

-            return injected;
+            return mapping;
         };
     }

-    public static Supplier<Map<String, Object>> textMapping(
-        DataSourceRequest.LeafMappingParametersGenerator request,
-        Map<String, Object> injected
-    ) {
+    private Supplier<Map<String, Object>> textMapping(DataSourceRequest.LeafMappingParametersGenerator request) {
         return () -> {
-            injected.put("store", ESTestCase.randomBoolean());
-            injected.put("index", ESTestCase.randomBoolean());
+            var mapping = new HashMap<String, Object>();
+
+            mapping.put("store", ESTestCase.randomBoolean());
+            mapping.put("index", ESTestCase.randomBoolean());
+
             if (ESTestCase.randomDouble() <= 0.1) {
-                var keywordMultiFieldMapping = keywordMapping(request, commonMappingParameters()).get();
+                var keywordMultiFieldMapping = keywordMapping(request).get();
                 keywordMultiFieldMapping.put("type", "keyword");
                 keywordMultiFieldMapping.remove("copy_to");

-                injected.put("fields", Map.of("kwd", keywordMultiFieldMapping));
+                mapping.put("fields", Map.of("kwd", keywordMultiFieldMapping));
             }

-            return injected;
+            return mapping;
         };
     }

-    private Supplier<Map<String, Object>> ipMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> ipMapping() {
         return () -> {
+            var mapping = commonMappingParameters();
+
             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("null_value", NetworkAddress.format(ESTestCase.randomIp(ESTestCase.randomBoolean())));
+                mapping.put("null_value", NetworkAddress.format(ESTestCase.randomIp(ESTestCase.randomBoolean())));
             }

             if (ESTestCase.randomBoolean()) {
-                injected.put("ignore_malformed", ESTestCase.randomBoolean());
+                mapping.put("ignore_malformed", ESTestCase.randomBoolean());
             }

-            return injected;
+            return mapping;
         };
     }

-    private Supplier<Map<String, Object>> constantKeywordMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> constantKeywordMapping() {
         return () -> {
+            var mapping = new HashMap<String, Object>();
+
             // value is optional and can be set from the first document
             // we don't cover this case here
-            injected.put("value", ESTestCase.randomAlphaOfLengthBetween(0, 10));
+            mapping.put("value", ESTestCase.randomAlphaOfLengthBetween(0, 10));

-            return injected;
+            return mapping;
         };
     }

-    private Supplier<Map<String, Object>> wildcardMapping(Map<String, Object> injected) {
+    private Supplier<Map<String, Object>> wildcardMapping() {
         return () -> {
+            var mapping = new HashMap<String, Object>();
+
             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("ignore_above", ESTestCase.randomIntBetween(1, 100));
+                mapping.put("ignore_above", ESTestCase.randomIntBetween(1, 100));
             }
             if (ESTestCase.randomDouble() <= 0.2) {
-                injected.put("null_value", ESTestCase.randomAlphaOfLengthBetween(0, 10));
+                mapping.put("null_value", ESTestCase.randomAlphaOfLengthBetween(0, 10));
             }

-            return injected;
+            return mapping;
         };
     }

@@ -254,6 +263,11 @@ public class DefaultMappingParametersHandler implements DataSourceHandler {
         map.put("store", ESTestCase.randomBoolean());
         map.put("index", ESTestCase.randomBoolean());
         map.put("doc_values", ESTestCase.randomBoolean());
+
+        if (ESTestCase.randomBoolean()) {
+            map.put(Mapper.SYNTHETIC_SOURCE_KEEP_PARAM, ESTestCase.randomFrom("none", "arrays", "all"));
+        }

         return map;
     }

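Note: the change running through this file replaces the shared mutable map that handle() used to create once and thread through every per-type supplier with a fresh map built inside each supplier (commonMappingParameters(), or a plain HashMap for types that take none of the common parameters). The hazard this removes, in miniature:

    // Two suppliers sharing one mutable map: parameters set while generating one
    // field's mapping leak into every later mapping built from the same map.
    Map<String, Object> shared = new HashMap<>();
    Supplier<Map<String, Object>> keyword = () -> {
        shared.put("ignore_above", 10);
        return shared;
    };
    Supplier<Map<String, Object>> bool = () -> shared;
    keyword.get();
    bool.get().containsKey("ignore_above"); // true: the keyword parameter leaked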
@@ -11,24 +11,13 @@ package org.elasticsearch.index.mapper;

 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.tests.index.RandomIndexWriter;
-import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.datageneration.DataGeneratorSpecification;
 import org.elasticsearch.datageneration.DocumentGenerator;
-import org.elasticsearch.datageneration.Mapping;
 import org.elasticsearch.datageneration.MappingGenerator;
 import org.elasticsearch.datageneration.Template;
 import org.elasticsearch.datageneration.datasource.DataSourceHandler;
 import org.elasticsearch.datageneration.datasource.DataSourceRequest;
 import org.elasticsearch.datageneration.datasource.DataSourceResponse;
-import org.elasticsearch.index.IndexSettings;
-import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
-import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
-import org.elasticsearch.search.fetch.StoredFieldsSpec;
-import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xcontent.XContentType;

@@ -38,7 +27,6 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 import java.util.stream.Stream;

 public abstract class BlockLoaderTestCase extends MapperServiceTestCase {
@@ -60,46 +48,26 @@ public abstract class BlockLoaderTestCase extends MapperServiceTestCase {

     public record Params(boolean syntheticSource, MappedFieldType.FieldExtractPreference preference) {}

-    public record TestContext(boolean forceFallbackSyntheticSource) {}
+    public record TestContext(boolean forceFallbackSyntheticSource, boolean isMultifield) {}

     private final String fieldType;
     protected final Params params;
+    private final Collection<DataSourceHandler> customDataSourceHandlers;
+    private final BlockLoaderTestRunner runner;
+
     private final String fieldName;
-    private final MappingGenerator mappingGenerator;
-    private final DocumentGenerator documentGenerator;

     protected BlockLoaderTestCase(String fieldType, Params params) {
         this(fieldType, List.of(), params);
     }

-    protected BlockLoaderTestCase(String fieldType, Collection<DataSourceHandler> customHandlers, Params params) {
+    protected BlockLoaderTestCase(String fieldType, Collection<DataSourceHandler> customDataSourceHandlers, Params params) {
         this.fieldType = fieldType;
         this.params = params;
+        this.customDataSourceHandlers = customDataSourceHandlers;
+        this.runner = new BlockLoaderTestRunner(params);

         this.fieldName = randomAlphaOfLengthBetween(5, 10);
-
-        var specification = DataGeneratorSpecification.builder()
-            .withFullyDynamicMapping(false)
-            // Disable dynamic mapping and disabled objects
-            .withDataSourceHandlers(List.of(new DataSourceHandler() {
-                @Override
-                public DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) {
-                    return new DataSourceResponse.DynamicMappingGenerator(isObject -> false);
-                }
-
-                @Override
-                public DataSourceResponse.ObjectMappingParametersGenerator handle(
-                    DataSourceRequest.ObjectMappingParametersGenerator request
-                ) {
-                    return new DataSourceResponse.ObjectMappingParametersGenerator(HashMap::new); // just defaults
-                }
-            }))
-            .withDataSourceHandlers(customHandlers)
-            .build();
-
-        this.mappingGenerator = new MappingGenerator(specification);
-        this.documentGenerator = new DocumentGenerator(specification);
     }

     @Override
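Note: TestContext gains an isMultifield flag so expected() implementations can tell whether the value under test was indexed through a multi field; GeoPointFieldBlockLoaderTests keys its geohash adjustment off this flag. The three combinations exercised by this commit:

    new TestContext(false, false); // plain top-level field
    new TestContext(true, false);  // fallback synthetic source forced via synthetic_source_keep
    new TestContext(false, true);  // multi field under a parent field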
@@ -114,9 +82,19 @@ public abstract class BlockLoaderTestCase extends MapperServiceTestCase {

     public void testBlockLoader() throws IOException {
         var template = new Template(Map.of(fieldName, new Template.Leaf(fieldName, fieldType)));
-        var mapping = mappingGenerator.generate(template);
+        var specification = buildSpecification(customDataSourceHandlers);

-        runTest(template, mapping, fieldName, new TestContext(false));
+        var mapping = new MappingGenerator(specification).generate(template);
+        var document = new DocumentGenerator(specification).generate(template, mapping);
+
+        Object expected = expected(mapping.lookup().get(fieldName), getFieldValue(document, fieldName), new TestContext(false, false));
+
+        var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(mapping.raw());
+        var mapperService = params.syntheticSource
+            ? createSytheticSourceMapperService(mappingXContent)
+            : createMapperService(mappingXContent);
+
+        runner.runTest(mapperService, document, expected, fieldName);
     }

     @SuppressWarnings("unchecked")
@@ -140,9 +118,11 @@ public abstract class BlockLoaderTestCase extends MapperServiceTestCase {
         currentLevel.put(fieldName, new Template.Leaf(fieldName, fieldType));
         var template = new Template(top);

-        var mapping = mappingGenerator.generate(template);
+        var specification = buildSpecification(customDataSourceHandlers);
+        var mapping = new MappingGenerator(specification).generate(template);
+        var document = new DocumentGenerator(specification).generate(template, mapping);

-        TestContext testContext = new TestContext(false);
+        TestContext testContext = new TestContext(false, false);

         if (params.syntheticSource && randomBoolean()) {
             // force fallback synthetic source in the hierarchy
@@ -150,29 +130,119 @@ public abstract class BlockLoaderTestCase extends MapperServiceTestCase {
             var topLevelMapping = (Map<String, Object>) ((Map<String, Object>) docMapping.get("properties")).get("top");
             topLevelMapping.put("synthetic_source_keep", "all");

-            testContext = new TestContext(true);
+            testContext = new TestContext(true, false);
         }

-        runTest(template, mapping, fullFieldName.toString(), testContext);
-    }
-
-    private void runTest(Template template, Mapping mapping, String fieldName, TestContext testContext) throws IOException {
         var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(mapping.raw());

         var mapperService = params.syntheticSource
             ? createSytheticSourceMapperService(mappingXContent)
             : createMapperService(mappingXContent);

-        var document = documentGenerator.generate(template, mapping);
-        var documentXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(document);
+        Object expected = expected(
+            mapping.lookup().get(fullFieldName.toString()),
+            getFieldValue(document, fullFieldName.toString()),
+            testContext
+        );

-        Object expected = expected(mapping.lookup().get(fieldName), getFieldValue(document, fieldName), testContext);
-        Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, documentXContent, blockLoaderFieldName(fieldName));
-        assertEquals(expected, blockLoaderResult);
+        runner.runTest(mapperService, document, expected, fullFieldName.toString());
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testBlockLoaderOfMultiField() throws IOException {
+        // We are going to have a parent field and a multi field of the same type in order to be sure we can index data.
+        // Then we'll test block loader of the multi field.
+        var template = new Template(Map.of("parent", new Template.Leaf("parent", fieldType)));
+
+        var customHandlers = new ArrayList<DataSourceHandler>();
+        customHandlers.add(new DataSourceHandler() {
+            @Override
+            public DataSourceResponse.LeafMappingParametersGenerator handle(DataSourceRequest.LeafMappingParametersGenerator request) {
+                // This is a bit tricky meta-logic.
+                // We want to customize mapping but to do this we need the mapping for the same field type
+                // so we use name to untangle this.
+                if (request.fieldName().equals("parent") == false) {
+                    return null;
+                }
+
+                return new DataSourceResponse.LeafMappingParametersGenerator(() -> {
+                    var dataSource = request.dataSource();
+
+                    // We need parent field to have the same mapping as multi field due to different behavior caused f.e. by
+                    // ignore_malformed.
+                    // The name here should be different from "parent".
+                    var mapping = dataSource.get(
+                        new DataSourceRequest.LeafMappingParametersGenerator(
+                            dataSource,
+                            "_field",
+                            request.fieldType(),
+                            request.eligibleCopyToFields(),
+                            request.dynamicMapping()
+                        )
+                    ).mappingGenerator().get();
+
+                    var parentMapping = new HashMap<>(mapping);
+                    var multiFieldMapping = new HashMap<>(mapping);
+
+                    multiFieldMapping.put("type", fieldType);
+                    multiFieldMapping.remove("fields");
+
+                    parentMapping.put("fields", Map.of("mf", multiFieldMapping));
+
+                    return parentMapping;
+                });
+            }
+        });
+        customHandlers.addAll(customDataSourceHandlers);
+        var specification = buildSpecification(customHandlers);
+        var mapping = new MappingGenerator(specification).generate(template);
+        var fieldMapping = (Map<String, Object>) ((Map<String, Object>) mapping.lookup().get("parent").get("fields")).get("mf");
+
+        var document = new DocumentGenerator(specification).generate(template, mapping);
+
+        Object expected = expected(fieldMapping, getFieldValue(document, "parent"), new TestContext(false, true));
+        var mappingXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(mapping.raw());
+        var mapperService = params.syntheticSource
+            ? createSytheticSourceMapperService(mappingXContent)
+            : createMapperService(mappingXContent);
+
+        runner.runTest(mapperService, document, expected, "parent.mf");
+    }
+
+    public static DataGeneratorSpecification buildSpecification(Collection<DataSourceHandler> customHandlers) {
+        return DataGeneratorSpecification.builder()
+            .withFullyDynamicMapping(false)
+            // Disable dynamic mapping and disabled objects
+            .withDataSourceHandlers(List.of(new DataSourceHandler() {
+                @Override
+                public DataSourceResponse.DynamicMappingGenerator handle(DataSourceRequest.DynamicMappingGenerator request) {
+                    return new DataSourceResponse.DynamicMappingGenerator(isObject -> false);
+                }
+
+                @Override
+                public DataSourceResponse.ObjectMappingParametersGenerator handle(
+                    DataSourceRequest.ObjectMappingParametersGenerator request
+                ) {
+                    return new DataSourceResponse.ObjectMappingParametersGenerator(HashMap::new); // just defaults
+                }
+            }))
+            .withDataSourceHandlers(customHandlers)
+            .build();
     }

     protected abstract Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext);

+    protected static Object maybeFoldList(List<?> list) {
+        if (list.isEmpty()) {
+            return null;
+        }
+
+        if (list.size() == 1) {
+            return list.get(0);
+        }
+
+        return list;
+    }
+
     protected Object getFieldValue(Map<String, Object> document, String fieldName) {
         var rawValues = new ArrayList<>();
         processLevel(document, fieldName, rawValues);
@@ -204,128 +274,7 @@ public abstract class BlockLoaderTestCase extends MapperServiceTestCase {
    }
}

protected static Object maybeFoldList(List<?> list) {
    if (list.isEmpty()) {
        return null;
    }

    if (list.size() == 1) {
        return list.get(0);
    }

    return list;
}

/**
 * Allows to change the field name used to obtain a block loader.
 * Useful, for example, to test block loaders of multi fields.
 */
protected String blockLoaderFieldName(String originalName) {
    return originalName;
}

private Object setupAndInvokeBlockLoader(MapperService mapperService, XContentBuilder document, String fieldName) throws IOException {
    try (Directory directory = newDirectory()) {
        RandomIndexWriter iw = new RandomIndexWriter(random(), directory);

        var source = new SourceToParse(
            "1",
            BytesReference.bytes(document),
            XContentType.JSON,
            null,
            Map.of(),
            true,
            XContentMeteringParserDecorator.NOOP
        );
        LuceneDocument doc = mapperService.documentMapper().parse(source).rootDoc();

        iw.addDocument(doc);
        iw.close();

        try (DirectoryReader reader = DirectoryReader.open(directory)) {
            LeafReaderContext context = reader.leaves().get(0);
            return load(createBlockLoader(mapperService, fieldName), context, mapperService);
        }
    }
}

private Object load(BlockLoader blockLoader, LeafReaderContext context, MapperService mapperService) throws IOException {
    // `columnAtATimeReader` is tried first, we mimic `ValuesSourceReaderOperator`
    var columnAtATimeReader = blockLoader.columnAtATimeReader(context);
    if (columnAtATimeReader != null) {
        var block = (TestBlock) columnAtATimeReader.read(TestBlock.factory(context.reader().numDocs()), TestBlock.docs(0));
        if (block.size() == 0) {
            return null;
        }
        return block.get(0);
    }

    StoredFieldsSpec storedFieldsSpec = blockLoader.rowStrideStoredFieldSpec();
    SourceLoader.Leaf leafSourceLoader = null;
    if (storedFieldsSpec.requiresSource()) {
        var sourceLoader = mapperService.mappingLookup().newSourceLoader(null, SourceFieldMetrics.NOOP);
        leafSourceLoader = sourceLoader.leaf(context.reader(), null);
        storedFieldsSpec = storedFieldsSpec.merge(
            new StoredFieldsSpec(true, storedFieldsSpec.requiresMetadata(), sourceLoader.requiredStoredFields())
        );
    }
    BlockLoaderStoredFieldsFromLeafLoader storedFieldsLoader = new BlockLoaderStoredFieldsFromLeafLoader(
        StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(context, null),
        leafSourceLoader
    );
    storedFieldsLoader.advanceTo(0);

    BlockLoader.Builder builder = blockLoader.builder(TestBlock.factory(context.reader().numDocs()), 1);
    blockLoader.rowStrideReader(context).read(0, storedFieldsLoader, builder);
    var block = (TestBlock) builder.build();
    if (block.size() == 0) {
        return null;
    }
    return block.get(0);
}

private BlockLoader createBlockLoader(MapperService mapperService, String fieldName) {
    SearchLookup searchLookup = new SearchLookup(mapperService.mappingLookup().fieldTypesLookup()::get, null, null);

    return mapperService.fieldType(fieldName).blockLoader(new MappedFieldType.BlockLoaderContext() {
        @Override
        public String indexName() {
            return mapperService.getIndexSettings().getIndex().getName();
        }

        @Override
        public IndexSettings indexSettings() {
            return mapperService.getIndexSettings();
        }

        @Override
        public MappedFieldType.FieldExtractPreference fieldExtractPreference() {
            return params.preference;
        }

        @Override
        public SearchLookup lookup() {
            return searchLookup;
        }

        @Override
        public Set<String> sourcePaths(String name) {
            return mapperService.mappingLookup().sourcePaths(name);
        }

        @Override
        public String parentField(String field) {
            return mapperService.mappingLookup().parentField(field);
        }

        @Override
        public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
            return (FieldNamesFieldMapper.FieldNamesFieldType) mapperService.fieldType(FieldNamesFieldMapper.NAME);
        }
    });
}

protected static boolean hasDocValues(Map<String, Object> fieldMapping, boolean defaultValue) {
public static boolean hasDocValues(Map<String, Object> fieldMapping, boolean defaultValue) {
    return (boolean) fieldMapping.getOrDefault("doc_values", defaultValue);
}
}
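The "parent.mf" path used by the test above addresses a multi field. As a point of reference, here is a minimal sketch of the kind of mapping such a test exercises; the concrete field names "parent" and "mf" come from the test, but the keyword types are an assumption for illustration, since these tests generate their mappings.

// Illustrative only: a field "parent" with a multi field "mf", addressed as "parent.mf".
// The keyword types are assumed for this sketch; the generated mappings vary.
public class MultiFieldMappingSketch {
    public static final String MAPPING = """
        {
          "_doc": {
            "properties": {
              "parent": {
                "type": "keyword",
                "fields": {
                  "mf": { "type": "keyword" }
                }
              }
            }
          }
        }
        """;
}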
@@ -0,0 +1,148 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import org.elasticsearch.plugins.internal.XContentMeteringParserDecorator;
import org.elasticsearch.search.fetch.StoredFieldsSpec;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
import org.junit.Assert;

import java.io.IOException;
import java.util.Map;
import java.util.Set;

import static org.apache.lucene.tests.util.LuceneTestCase.newDirectory;
import static org.apache.lucene.tests.util.LuceneTestCase.random;

public class BlockLoaderTestRunner {
    private final BlockLoaderTestCase.Params params;

    public BlockLoaderTestRunner(BlockLoaderTestCase.Params params) {
        this.params = params;
    }

    public void runTest(MapperService mapperService, Map<String, Object> document, Object expected, String blockLoaderFieldName)
        throws IOException {
        var documentXContent = XContentBuilder.builder(XContentType.JSON.xContent()).map(document);

        Object blockLoaderResult = setupAndInvokeBlockLoader(mapperService, documentXContent, blockLoaderFieldName);
        Assert.assertEquals(expected, blockLoaderResult);
    }

    private Object setupAndInvokeBlockLoader(MapperService mapperService, XContentBuilder document, String fieldName) throws IOException {
        try (Directory directory = newDirectory()) {
            RandomIndexWriter iw = new RandomIndexWriter(random(), directory);

            var source = new SourceToParse(
                "1",
                BytesReference.bytes(document),
                XContentType.JSON,
                null,
                Map.of(),
                true,
                XContentMeteringParserDecorator.NOOP
            );
            LuceneDocument doc = mapperService.documentMapper().parse(source).rootDoc();

            iw.addDocument(doc);
            iw.close();

            try (DirectoryReader reader = DirectoryReader.open(directory)) {
                LeafReaderContext context = reader.leaves().get(0);
                return load(createBlockLoader(mapperService, fieldName), context, mapperService);
            }
        }
    }

    private Object load(BlockLoader blockLoader, LeafReaderContext context, MapperService mapperService) throws IOException {
        // `columnAtATimeReader` is tried first, we mimic `ValuesSourceReaderOperator`
        var columnAtATimeReader = blockLoader.columnAtATimeReader(context);
        if (columnAtATimeReader != null) {
            var block = (TestBlock) columnAtATimeReader.read(TestBlock.factory(context.reader().numDocs()), TestBlock.docs(0));
            if (block.size() == 0) {
                return null;
            }
            return block.get(0);
        }

        StoredFieldsSpec storedFieldsSpec = blockLoader.rowStrideStoredFieldSpec();
        SourceLoader.Leaf leafSourceLoader = null;
        if (storedFieldsSpec.requiresSource()) {
            var sourceLoader = mapperService.mappingLookup().newSourceLoader(null, SourceFieldMetrics.NOOP);
            leafSourceLoader = sourceLoader.leaf(context.reader(), null);
            storedFieldsSpec = storedFieldsSpec.merge(
                new StoredFieldsSpec(true, storedFieldsSpec.requiresMetadata(), sourceLoader.requiredStoredFields())
            );
        }
        BlockLoaderStoredFieldsFromLeafLoader storedFieldsLoader = new BlockLoaderStoredFieldsFromLeafLoader(
            StoredFieldLoader.fromSpec(storedFieldsSpec).getLoader(context, null),
            leafSourceLoader
        );
        storedFieldsLoader.advanceTo(0);

        BlockLoader.Builder builder = blockLoader.builder(TestBlock.factory(context.reader().numDocs()), 1);
        blockLoader.rowStrideReader(context).read(0, storedFieldsLoader, builder);
        var block = (TestBlock) builder.build();
        if (block.size() == 0) {
            return null;
        }
        return block.get(0);
    }

    private BlockLoader createBlockLoader(MapperService mapperService, String fieldName) {
        SearchLookup searchLookup = new SearchLookup(mapperService.mappingLookup().fieldTypesLookup()::get, null, null);

        return mapperService.fieldType(fieldName).blockLoader(new MappedFieldType.BlockLoaderContext() {
            @Override
            public String indexName() {
                return mapperService.getIndexSettings().getIndex().getName();
            }

            @Override
            public IndexSettings indexSettings() {
                return mapperService.getIndexSettings();
            }

            @Override
            public MappedFieldType.FieldExtractPreference fieldExtractPreference() {
                return params.preference();
            }

            @Override
            public SearchLookup lookup() {
                return searchLookup;
            }

            @Override
            public Set<String> sourcePaths(String name) {
                return mapperService.mappingLookup().sourcePaths(name);
            }

            @Override
            public String parentField(String field) {
                return mapperService.mappingLookup().parentField(field);
            }

            @Override
            public FieldNamesFieldMapper.FieldNamesFieldType fieldNames() {
                return (FieldNamesFieldMapper.FieldNamesFieldType) mapperService.fieldType(FieldNamesFieldMapper.NAME);
            }
        });
    }
}
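For orientation, a hedged usage sketch of the extracted runner follows; the enclosing test is assumed to supply the mapper service, the source document, and the expected value, and the helper method name is hypothetical.

// Hypothetical helper, not part of the PR: drive the runner against the multi field "parent.mf".
void runMultiFieldCase(
    BlockLoaderTestCase.Params params,
    MapperService mapperService,
    Map<String, Object> document,
    Object expected
) throws IOException {
    new BlockLoaderTestRunner(params).runTest(mapperService, document, expected, "parent.mf");
}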
@@ -15,6 +15,7 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.aggregatemetric.AggregateMetricMapperPlugin;
import org.elasticsearch.xpack.aggregatemetric.mapper.datageneration.AggregateMetricDoubleDataSourceHandler;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;

@@ -33,6 +34,11 @@ public class AggregateMetricDoubleFieldBlockLoaderTests extends BlockLoaderTestC
    }), params);
}

@Override
public void testBlockLoaderOfMultiField() throws IOException {
    // Multi fields are a noop for aggregate_metric_double.
}

@Override
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
    if (value instanceof Map<?, ?> map) {
@@ -13,6 +13,7 @@ import org.elasticsearch.index.mapper.BlockLoaderTestCase;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.xpack.constantkeyword.ConstantKeywordMapperPlugin;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;

@@ -22,6 +23,11 @@ public class ConstantKeywordFieldBlockLoaderTests extends BlockLoaderTestCase {
    super(FieldType.CONSTANT_KEYWORD.toString(), params);
}

@Override
public void testBlockLoaderOfMultiField() throws IOException {
    // Multi fields are a noop for constant_keyword.
}

@Override
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
    return new BytesRef((String) fieldMapping.get("value"));
@@ -379,7 +379,8 @@ public class UnsignedLongFieldMapper extends FieldMapper {
if (hasDocValues() && (blContext.fieldExtractPreference() != FieldExtractPreference.STORED || isSyntheticSource)) {
    return new BlockDocValuesReader.LongsBlockLoader(name());
}
if (isSyntheticSource) {
// Multi fields don't have fallback synthetic source.
if (isSyntheticSource && blContext.parentField(name()) == null) {
    return new FallbackSyntheticSourceBlockLoader(fallbackSyntheticSourceBlockLoaderReader(), name()) {
        @Override
        public Builder builder(BlockFactory factory, int expectedCount) {
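The guard has the same shape in every mapper touched by this change: the fallback-synthetic-source block loader is used only when the field has no parent, i.e. is not a multi field. A standalone sketch of the predicate follows; the names are illustrative, not taken from the PR.

// Illustrative sketch: for a multi field such as "parent.mf", parentField is "parent"
// (non-null), so the fallback synthetic source branch is skipped.
static boolean usesFallbackSyntheticSource(boolean isSyntheticSource, String parentField) {
    return isSyntheticSource && parentField == null;
}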
@@ -313,7 +313,8 @@ public class GeoShapeWithDocValuesFieldMapper extends AbstractShapeGeometryField
if (blContext.fieldExtractPreference() == FieldExtractPreference.EXTRACT_SPATIAL_BOUNDS) {
    return new GeoBoundsBlockLoader(name());
}
if (isSyntheticSource) {
// Multi fields don't have fallback synthetic source.
if (isSyntheticSource && blContext.parentField(name()) == null) {
    return blockLoaderFromFallbackSyntheticSource(blContext);
}
@@ -245,7 +245,8 @@ public class PointFieldMapper extends AbstractPointGeometryFieldMapper<Cartesian
    return new BlockDocValuesReader.LongsBlockLoader(name());
}

if (isSyntheticSource) {
// Multi fields don't have fallback synthetic source.
if (isSyntheticSource && blContext.parentField(name()) == null) {
    return blockLoaderFromFallbackSyntheticSource(blContext);
}
@@ -201,7 +201,8 @@ public class ShapeFieldMapper extends AbstractShapeGeometryFieldMapper<Geometry>
    return new CartesianBoundsBlockLoader(name());
}

if (isSyntheticSource) {
// Multi fields don't have fallback synthetic source.
if (isSyntheticSource && blContext.parentField(name()) == null) {
    return blockLoaderFromFallbackSyntheticSource(blContext);
}
@@ -24,6 +24,7 @@ import org.elasticsearch.xcontent.support.MapXContentParser;
import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin;
import org.elasticsearch.xpack.spatial.datageneration.GeoShapeDataSourceHandler;

import java.io.IOException;
import java.nio.ByteOrder;
import java.util.Collection;
import java.util.Collections;

@@ -36,6 +37,11 @@ public class GeoShapeFieldBlockLoaderTests extends BlockLoaderTestCase {
    super("geo_shape", List.of(new GeoShapeDataSourceHandler()), params);
}

@Override
public void testBlockLoaderOfMultiField() throws IOException {
    // Multi fields are a noop for geo_shape.
}

@Override
@SuppressWarnings("unchecked")
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
@@ -19,6 +19,7 @@ import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin;
import org.elasticsearch.xpack.spatial.common.CartesianPoint;
import org.elasticsearch.xpack.spatial.datageneration.PointDataSourceHandler;

import java.io.IOException;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Collection;

@@ -32,6 +33,11 @@ public class PointFieldBlockLoaderTests extends BlockLoaderTestCase {
    super("point", List.of(new PointDataSourceHandler()), params);
}

@Override
public void testBlockLoaderOfMultiField() throws IOException {
    // Multi fields are a noop for point.
}

@Override
@SuppressWarnings("unchecked")
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {
@@ -22,6 +22,7 @@ import org.elasticsearch.xcontent.support.MapXContentParser;
import org.elasticsearch.xpack.spatial.LocalStateSpatialPlugin;
import org.elasticsearch.xpack.spatial.datageneration.ShapeDataSourceHandler;

import java.io.IOException;
import java.nio.ByteOrder;
import java.util.Collection;
import java.util.Collections;

@@ -34,6 +35,11 @@ public class ShapeFieldBlockLoaderTests extends BlockLoaderTestCase {
    super("shape", List.of(new ShapeDataSourceHandler()), params);
}

@Override
public void testBlockLoaderOfMultiField() throws IOException {
    // Multi fields are a noop for shape.
}

@Override
@SuppressWarnings("unchecked")
protected Object expected(Map<String, Object> fieldMapping, Object value, TestContext testContext) {