Synthetic source error on script loads (#88334)

* Source Lookup refactor with error on script synthetic source load

Refactors SourceLookup into a static source lookup, used in most cases where the
source is accessed again after the fetch phase, and a re-loading lookup used by
scripts. The re-loading lookup now also fails with an error when synthetic source
is in use, preventing silent failures or nonsensical behavior in scripts.
tmgordeeva 2022-08-26 11:32:53 -07:00 committed by GitHub
parent 6f5fa14df0
commit db5ddb321f
63 changed files with 520 additions and 220 deletions
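
As a rough illustration of the behavioral change, the sketch below uses the provider classes this commit adds to SourceLookup; the wrapper class is hypothetical and it assumes the Elasticsearch server classes are on the classpath:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.search.lookup.SourceLookup;

public class SyntheticSourceAccessSketch {
    public static void main(String[] args) {
        // Stored _source: the provider parses the bytes lazily and scripts can read the map.
        SourceLookup stored = new SourceLookup(new SourceLookup.BytesSourceProvider(new BytesArray("{\"field\": 1}")));
        System.out.println(stored.source()); // {field=1}

        // Synthetic source: MappingLookup#getSourceProvider hands out a NullSourceProvider,
        // so a script touching _source now fails loudly instead of seeing nothing.
        SourceLookup synthetic = new SourceLookup(new SourceLookup.NullSourceProvider());
        try {
            synthetic.source();
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // "no source available"
        }
    }
}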

View file

@ -40,6 +40,7 @@ import org.elasticsearch.script.DocValuesDocReader;
import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.ScriptModule; import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork; import org.openjdk.jmh.annotations.Fork;
@ -88,7 +89,8 @@ public class ScriptScoreBenchmark {
private final CircuitBreakerService breakerService = new NoneCircuitBreakerService(); private final CircuitBreakerService breakerService = new NoneCircuitBreakerService();
private final SearchLookup lookup = new SearchLookup( private final SearchLookup lookup = new SearchLookup(
fieldTypes::get, fieldTypes::get,
(mft, lookup, fdo) -> mft.fielddataBuilder(FieldDataContext.noRuntimeFields("benchmark")).build(fieldDataCache, breakerService) (mft, lookup, fdo) -> mft.fielddataBuilder(FieldDataContext.noRuntimeFields("benchmark")).build(fieldDataCache, breakerService),
new SourceLookup.ReaderSourceProvider()
); );
@Param({ "expression", "metal", "painless_cast", "painless_def" }) @Param({ "expression", "metal", "painless_cast", "painless_def" })

View file

@ -79,8 +79,7 @@ public class FetchSourcePhaseBenchmark {
@Benchmark @Benchmark
public BytesReference filterObjects() throws IOException { public BytesReference filterObjects() throws IOException {
SourceLookup lookup = new SourceLookup(); SourceLookup lookup = new SourceLookup(new SourceLookup.BytesSourceProvider(sourceBytes));
lookup.setSource(sourceBytes);
Object value = lookup.filter(fetchContext); Object value = lookup.filter(fetchContext);
return FetchSourcePhase.objectToBytes(value, XContentType.JSON, Math.min(1024, lookup.internalSourceRef().length())); return FetchSourcePhase.objectToBytes(value, XContentType.JSON, Math.min(1024, lookup.internalSourceRef().length()));
} }

View file

@ -0,0 +1,5 @@
pr: 88334
summary: Synthetic source error on script loads
area: Geo
type: bug
issues: []

View file

@ -15,6 +15,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.FieldScript;
import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptException;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -49,7 +50,11 @@ public class ExpressionFieldScriptTests extends ESTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData); when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine(); service = new ExpressionScriptEngine();
lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup, fdt) -> fieldData); lookup = new SearchLookup(
field -> field.equals("field") ? fieldType : null,
(ignored, _lookup, fdt) -> fieldData,
new SourceLookup.ReaderSourceProvider()
);
} }
private FieldScript.LeafFactory compile(String expression) { private FieldScript.LeafFactory compile(String expression) {

View file

@ -17,6 +17,7 @@ import org.elasticsearch.script.DocValuesDocReader;
import org.elasticsearch.script.NumberSortScript; import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptException;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -50,7 +51,11 @@ public class ExpressionNumberSortScriptTests extends ESTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData); when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine(); service = new ExpressionScriptEngine();
lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup, fdt) -> fieldData); lookup = new SearchLookup(
field -> field.equals("field") ? fieldType : null,
(ignored, _lookup, fdt) -> fieldData,
new SourceLookup.ReaderSourceProvider()
);
} }
private NumberSortScript.LeafFactory compile(String expression) { private NumberSortScript.LeafFactory compile(String expression) {

View file

@ -16,6 +16,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.script.TermsSetQueryScript;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -49,7 +50,11 @@ public class ExpressionTermsSetQueryTests extends ESTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData); when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine(); service = new ExpressionScriptEngine();
lookup = new SearchLookup(field -> field.equals("field") ? fieldType : null, (ignored, _lookup, fdt) -> fieldData); lookup = new SearchLookup(
field -> field.equals("field") ? fieldType : null,
(ignored, _lookup, fdt) -> fieldData,
new SourceLookup.ReaderSourceProvider()
);
} }
private TermsSetQueryScript.LeafFactory compile(String expression) { private TermsSetQueryScript.LeafFactory compile(String expression) {

View file

@ -22,6 +22,7 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase; import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter; import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -86,7 +87,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
percolatorLeafReaderContext, percolatorLeafReaderContext,
slot slot
); );
subContext.sourceLookup().setSource(document); subContext.sourceLookup().setSourceProvider(new SourceLookup.BytesSourceProvider(document));
// force source because MemoryIndex does not store fields // force source because MemoryIndex does not store fields
SearchHighlightContext highlight = new SearchHighlightContext(fetchContext.highlight().fields(), true); SearchHighlightContext highlight = new SearchHighlightContext(fetchContext.highlight().fields(), true);
FetchSubPhaseProcessor processor = highlightPhase.getProcessor(fetchContext, highlight, query); FetchSubPhaseProcessor processor = highlightPhase.getProcessor(fetchContext, highlight, query);

View file

@ -29,6 +29,7 @@ import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.lookup.FieldLookup; import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
@ -142,7 +143,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
static Object sourceScript(Map<String, Object> vars, String path) { static Object sourceScript(Map<String, Object> vars, String path) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Map<String, Object> source = (Map<String, Object>) vars.get("_source"); Map<String, Object> source = ((SourceLookup) vars.get("_source")).source();
return XContentMapValues.extractValue(path, source); return XContentMapValues.extractValue(path, source);
} }

View file

@ -25,6 +25,7 @@ import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection; import java.util.Collection;
@ -61,7 +62,7 @@ public class SearchStatsIT extends ESIntegTestCase {
@Override @Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() { protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.singletonMap("_source.field", vars -> { return Collections.singletonMap("_source.field", vars -> {
Map<?, ?> src = (Map) vars.get("_source"); Map<?, ?> src = ((SourceLookup) vars.get("_source")).source();
return src.get("field"); return src.get("field");
}); });
} }

View file

@ -332,8 +332,7 @@ public class UpdateHelper {
BytesReference sourceFilteredAsBytes = sourceAsBytes; BytesReference sourceFilteredAsBytes = sourceAsBytes;
if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) { if (request.fetchSource().includes().length > 0 || request.fetchSource().excludes().length > 0) {
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.MapSourceProvider(source));
sourceLookup.setSource(source);
Object value = sourceLookup.filter(request.fetchSource()); Object value = sourceLookup.filter(request.fetchSource());
try { try {
final int initialCapacity = sourceAsBytes != null ? Math.min(1024, sourceAsBytes.length()) : 1024; final int initialCapacity = sourceAsBytes != null ? Math.min(1024, sourceAsBytes.length()) : 1024;

View file

@ -24,6 +24,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
@ -144,7 +145,8 @@ public final class DocumentParser {
context.mappingLookup().indexTimeLookup()::get, context.mappingLookup().indexTimeLookup()::get,
(ft, lookup, fto) -> ft.fielddataBuilder( (ft, lookup, fto) -> ft.fielddataBuilder(
new FieldDataContext(context.indexSettings().getIndex().getName(), lookup, context.mappingLookup()::sourcePaths, fto) new FieldDataContext(context.indexSettings().getIndex().getName(), lookup, context.mappingLookup()::sourcePaths, fto)
).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()) ).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()),
new SourceLookup.ReaderSourceProvider()
); );
// field scripts can be called both by the loop at the end of this method and via // field scripts can be called both by the loop at the end of this method and via
// the document reader, so to ensure that we don't run them multiple times we // the document reader, so to ensure that we don't run them multiple times we

View file

@ -13,6 +13,7 @@ import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SourceLookup;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
@ -470,4 +471,18 @@ public final class MappingLookup {
throw new MapperParsingException("Field [" + name + "] attempted to shadow a time_series_metric"); throw new MapperParsingException("Field [" + name + "] attempted to shadow a time_series_metric");
} }
} }
/**
* Returns a SourceProvider describing how to read the source. If using synthetic source, returns the null source provider
* expecting the source to either be provided later by a fetch phase or not be accessed at all (as in scripts).
* @return
*/
public SourceLookup.SourceProvider getSourceProvider() {
SourceFieldMapper sourceMapper = (SourceFieldMapper) getMapper("_source");
if (sourceMapper == null || sourceMapper.isSynthetic() == false) {
return new SourceLookup.ReaderSourceProvider();
} else {
return new SourceLookup.NullSourceProvider();
}
}
} }

View file

@ -50,8 +50,7 @@ public class NestedValueFetcher implements ValueFetcher {
for (Object entry : nestedValues) { for (Object entry : nestedValues) {
// add this one entry only to the stub and use this as source lookup // add this one entry only to the stub and use this as source lookup
stub.put(nestedFieldName, entry); stub.put(nestedFieldName, entry);
SourceLookup nestedSourceLookup = new SourceLookup(); SourceLookup nestedSourceLookup = new SourceLookup(new SourceLookup.MapSourceProvider(filteredSource));
nestedSourceLookup.setSource(filteredSource);
Map<String, DocumentField> fetchResult = nestedFieldFetcher.fetch(nestedSourceLookup); Map<String, DocumentField> fetchResult = nestedFieldFetcher.fetch(nestedSourceLookup);

View file

@ -83,7 +83,7 @@ public interface SourceLoader {
}; };
/** /**
* Load {@code _source} from doc vales. * Load {@code _source} from doc values.
*/ */
class Synthetic implements SourceLoader { class Synthetic implements SourceLoader {
private final SyntheticFieldLoader loader; private final SyntheticFieldLoader loader;

View file

@ -59,6 +59,7 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.NestedDocuments; import org.elasticsearch.search.NestedDocuments;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentParserConfiguration;
@ -481,12 +482,16 @@ public class SearchExecutionContext extends QueryRewriteContext {
*/ */
public SearchLookup lookup() { public SearchLookup lookup() {
if (this.lookup == null) { if (this.lookup == null) {
SourceLookup.SourceProvider sourceProvider = mappingLookup == null
? new SourceLookup.ReaderSourceProvider()
: mappingLookup.getSourceProvider();
this.lookup = new SearchLookup( this.lookup = new SearchLookup(
this::getFieldType, this::getFieldType,
(fieldType, searchLookup, fielddataOperation) -> indexFieldDataLookup.apply( (fieldType, searchLookup, fielddataOperation) -> indexFieldDataLookup.apply(
fieldType, fieldType,
new FieldDataContext(fullyQualifiedIndex.getName(), searchLookup, this::sourcePath, fielddataOperation) new FieldDataContext(fullyQualifiedIndex.getName(), searchLookup, this::sourcePath, fielddataOperation)
) ),
sourceProvider
); );
} }
return this.lookup; return this.lookup;

View file

@ -272,8 +272,7 @@ public class TermVectorsService {
} }
if (source != null) { if (source != null) {
MappingLookup mappingLookup = indexShard.mapperService().mappingLookup(); MappingLookup mappingLookup = indexShard.mapperService().mappingLookup();
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.MapSourceProvider(source));
sourceLookup.setSource(source);
for (String field : fields) { for (String field : fields) {
if (values.containsKey(field) == false) { if (values.containsKey(field) == false) {
SourceValueFetcher valueFetcher = SourceValueFetcher.toString(mappingLookup.sourcePaths(field)); SourceValueFetcher valueFetcher = SourceValueFetcher.toString(mappingLookup.sourcePaths(field));

View file

@ -383,11 +383,12 @@ public class FetchPhase {
if (source != null) { if (source != null) {
// Store the loaded source on the hit context so that fetch subphases can access it. // Store the loaded source on the hit context so that fetch subphases can access it.
// Also make it available to scripts by storing it on the shared SearchLookup instance. // Also make it available to scripts by storing it on the shared SearchLookup instance.
hitContext.sourceLookup().setSource(source); SourceLookup.BytesSourceProvider sourceBytes = new SourceLookup.BytesSourceProvider(source);
hitContext.sourceLookup().setSourceProvider(sourceBytes);
SourceLookup scriptSourceLookup = context.getSearchExecutionContext().lookup().source(); SourceLookup scriptSourceLookup = context.getSearchExecutionContext().lookup().source();
scriptSourceLookup.setSegmentAndDocument(subReaderContext, subDocId); scriptSourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
scriptSourceLookup.setSource(source); scriptSourceLookup.setSourceProvider(sourceBytes);
} }
return hitContext; return hitContext;
} }
@ -486,8 +487,7 @@ public class FetchPhase {
} }
} }
hitContext.sourceLookup().setSource(nestedSourceAsMap); hitContext.sourceLookup().setSourceProvider(new SourceLookup.MapSourceProvider(nestedSourceAsMap, rootSourceContentType));
hitContext.sourceLookup().setSourceContentType(rootSourceContentType);
} }
return hitContext; return hitContext;
} }

View file

@ -31,7 +31,7 @@ public interface FetchSubPhase {
this.hit = hit; this.hit = hit;
this.readerContext = context; this.readerContext = context;
this.docId = docId; this.docId = docId;
this.sourceLookup = new SourceLookup(); this.sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(context, docId); sourceLookup.setSegmentAndDocument(context, docId);
} }
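
Taken together, the two hunks above show the new provider lifecycle during fetch: HitContext starts its SourceLookup on a ReaderSourceProvider, and once FetchPhase has loaded the source bytes it swaps in a BytesSourceProvider so later sub-phases and scripts reuse them instead of re-reading stored fields. A condensed sketch of that flow (the wrapper method and variable names are hypothetical):

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.search.lookup.SourceLookup;

import java.util.Map;

class FetchSourceProviderSketch {
    // 'leafContext', 'docId' and 'loadedSource' stand in for what FetchPhase already has in hand.
    static Map<String, Object> sketch(LeafReaderContext leafContext, int docId, BytesReference loadedSource) {
        // HitContext default: read _source from stored fields on demand for this segment/doc.
        SourceLookup lookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
        lookup.setSegmentAndDocument(leafContext, docId);

        // Once the bytes are loaded, reuse them for later sub-phases and scripts.
        lookup.setSourceProvider(new SourceLookup.BytesSourceProvider(loadedSource));
        return lookup.source(); // parsed lazily from the provided bytes
    }
}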

View file

@ -29,6 +29,7 @@ import java.util.LinkedHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors; import java.util.stream.Collectors;
/** /**
@ -172,15 +173,20 @@ public class FieldFetcher {
documentFields.put(field, docField); documentFields.put(field, docField);
} }
} }
collectUnmapped(documentFields, sourceLookup, "", 0); collectUnmapped(documentFields, () -> sourceLookup.source(), "", 0);
return documentFields; return documentFields;
} }
private void collectUnmapped(Map<String, DocumentField> documentFields, Map<String, Object> source, String parentPath, int lastState) { private void collectUnmapped(
Map<String, DocumentField> documentFields,
Supplier<Map<String, Object>> source,
String parentPath,
int lastState
) {
// lookup field patterns containing wildcards // lookup field patterns containing wildcards
if (this.unmappedFieldsFetchAutomaton != null) { if (this.unmappedFieldsFetchAutomaton != null) {
for (String key : source.keySet()) { for (String key : source.get().keySet()) {
Object value = source.get(key); Object value = source.get().get(key);
String currentPath = parentPath + key; String currentPath = parentPath + key;
if (this.fieldContexts.containsKey(currentPath)) { if (this.fieldContexts.containsKey(currentPath)) {
continue; continue;
@ -196,7 +202,7 @@ public class FieldFetcher {
Map<String, Object> objectMap = (Map<String, Object>) value; Map<String, Object> objectMap = (Map<String, Object>) value;
collectUnmapped( collectUnmapped(
documentFields, documentFields,
objectMap, () -> objectMap,
currentPath + ".", currentPath + ".",
step(this.unmappedFieldsFetchAutomaton, ".", currentState) step(this.unmappedFieldsFetchAutomaton, ".", currentState)
); );
@ -227,7 +233,7 @@ public class FieldFetcher {
if (this.fieldContexts.containsKey(path)) { if (this.fieldContexts.containsKey(path)) {
continue; // this is actually a mapped field continue; // this is actually a mapped field
} }
List<Object> values = XContentMapValues.extractRawValues(path, source); List<Object> values = XContentMapValues.extractRawValues(path, source.get());
if (values.isEmpty() == false) { if (values.isEmpty() == false) {
documentFields.put(path, new DocumentField(path, values)); documentFields.put(path, new DocumentField(path, values));
} }
@ -241,7 +247,7 @@ public class FieldFetcher {
if (value instanceof Map) { if (value instanceof Map) {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
final Map<String, Object> objectMap = (Map<String, Object>) value; final Map<String, Object> objectMap = (Map<String, Object>) value;
collectUnmapped(documentFields, objectMap, parentPath + ".", step(this.unmappedFieldsFetchAutomaton, ".", lastState)); collectUnmapped(documentFields, () -> objectMap, parentPath + ".", step(this.unmappedFieldsFetchAutomaton, ".", lastState));
} else if (value instanceof List) { } else if (value instanceof List) {
// weird case, but can happen for objects with "enabled" : "false" // weird case, but can happen for objects with "enabled" : "false"
collectUnmappedList(documentFields, (List<?>) value, parentPath, lastState); collectUnmappedList(documentFields, (List<?>) value, parentPath, lastState);
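
The FieldFetcher hunk above switches collectUnmapped from taking the already-parsed source map to taking a Supplier<Map<String, Object>>, so sourceLookup.source() is only invoked when an unmapped-field pattern actually needs the map. A standalone sketch of that lazy-supplier pattern (names here are hypothetical, not Elasticsearch API):

import java.util.Map;
import java.util.function.Supplier;

class LazySourceSketch {
    // Hypothetical stand-in for sourceLookup.source(): only runs when the supplier is invoked.
    static Map<String, Object> loadSource() {
        System.out.println("loading _source");
        return Map.of("foo", "bar");
    }

    static void collectUnmapped(Supplier<Map<String, Object>> source, boolean hasUnmappedPatterns) {
        if (hasUnmappedPatterns == false) {
            return; // source.get() is never called, so _source is never loaded or parsed
        }
        System.out.println(source.get().keySet());
    }

    public static void main(String[] args) {
        collectUnmapped(LazySourceSketch::loadSource, false); // prints nothing
        collectUnmapped(LazySourceSketch::loadSource, true);  // "loading _source", then [foo]
    }
}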

View file

@ -53,11 +53,12 @@ public class SearchLookup {
*/ */
public SearchLookup( public SearchLookup(
Function<String, MappedFieldType> fieldTypeLookup, Function<String, MappedFieldType> fieldTypeLookup,
TriFunction<MappedFieldType, Supplier<SearchLookup>, MappedFieldType.FielddataOperation, IndexFieldData<?>> fieldDataLookup TriFunction<MappedFieldType, Supplier<SearchLookup>, MappedFieldType.FielddataOperation, IndexFieldData<?>> fieldDataLookup,
SourceLookup.SourceProvider sourceProvider
) { ) {
this.fieldTypeLookup = fieldTypeLookup; this.fieldTypeLookup = fieldTypeLookup;
this.fieldChain = Collections.emptySet(); this.fieldChain = Collections.emptySet();
this.sourceLookup = new SourceLookup(); this.sourceLookup = new SourceLookup(sourceProvider);
this.fieldDataLookup = fieldDataLookup; this.fieldDataLookup = fieldDataLookup;
} }

View file

@ -25,26 +25,23 @@ import org.elasticsearch.xcontent.XContentType;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.function.Supplier; import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
public class SourceLookup implements Map<String, Object> { public class SourceLookup implements Map<String, Object> {
private SourceProvider sourceProvider;
private LeafReader reader;
private CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader;
private int docId = -1; private int docId = -1;
private BytesReference sourceAsBytes; public SourceLookup(SourceProvider sourceProvider) {
private Map<String, Object> source; this.sourceProvider = sourceProvider;
private XContentType sourceContentType; }
public XContentType sourceContentType() { public XContentType sourceContentType() {
return sourceContentType; return sourceProvider.sourceContentType();
} }
public int docId() { public int docId() {
@ -62,101 +59,30 @@ public class SourceLookup implements Map<String, Object> {
* number. * number.
*/ */
public Map<String, Object> source() { public Map<String, Object> source() {
if (source != null) { return sourceProvider.source();
return source;
}
if (sourceAsBytes != null) {
Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(sourceAsBytes);
sourceContentType = tuple.v1();
source = tuple.v2();
return source;
}
try {
FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
fieldReader.accept(docId, sourceFieldVisitor);
BytesReference source = sourceFieldVisitor.source();
if (source == null) {
this.source = emptyMap();
this.sourceContentType = null;
} else {
Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(source);
this.sourceContentType = tuple.v1();
this.source = tuple.v2();
}
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse / load source", e);
}
return this.source;
} }
private static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(BytesReference source) throws ElasticsearchParseException { public void setSourceProvider(SourceProvider sourceProvider) {
return XContentHelper.convertToMap(source, false); this.sourceProvider = sourceProvider;
}
/**
* Get the source as a {@link Map} of java objects.
* <p>
* Important: This can lose precision on numbers with a decimal point. It
* converts numbers like {@code "n": 1234.567} to a {@code double} which
* only has 52 bits of precision in the mantissa. This will come up most
* frequently when folks write nanosecond precision dates as a decimal
* number.
*/
public static Map<String, Object> sourceAsMap(BytesReference source) throws ElasticsearchParseException {
return sourceAsMapAndType(source).v2();
} }
public void setSegmentAndDocument(LeafReaderContext context, int docId) { public void setSegmentAndDocument(LeafReaderContext context, int docId) {
// if we are called with the same document, don't invalidate source sourceProvider.setSegmentAndDocument(context, docId);
if (this.reader == context.reader() && this.docId == docId) {
return;
}
// only reset reader and fieldReader when reader changes
if (this.reader != context.reader()) {
this.reader = context.reader();
// All the docs to fetch are adjacent but Lucene stored fields are optimized
// for random access and don't optimize for sequential access - except for merging.
// So we do a little hack here and pretend we're going to do merges in order to
// get better sequential access.
if (context.reader()instanceof SequentialStoredFieldsLeafReader lf) {
// Avoid eagerly loading the stored fields reader, since this can be expensive
Supplier<StoredFieldsReader> supplier = new MemoizedSupplier<>(lf::getSequentialStoredFieldsReader);
fieldReader = (d, v) -> supplier.get().visitDocument(d, v);
} else {
fieldReader = context.reader()::document;
}
}
this.source = null;
this.sourceAsBytes = null;
this.docId = docId; this.docId = docId;
} }
public void setSource(BytesReference source) {
this.sourceAsBytes = source;
}
public void setSourceContentType(XContentType sourceContentType) {
this.sourceContentType = sourceContentType;
}
public void setSource(Map<String, Object> source) {
this.source = source;
}
/** /**
* Internal source representation, might be compressed.... * Internal source representation, might be compressed....
*/ */
public BytesReference internalSourceRef() { public BytesReference internalSourceRef() {
return sourceAsBytes; return sourceProvider.sourceAsBytes();
} }
/** /**
* Checks if the source has been deserialized as a {@link Map} of java objects. * Checks if the source has been deserialized as a {@link Map} of java objects.
*/ */
public boolean hasSourceAsMap() { public boolean hasSourceAsMap() {
return source != null; return sourceProvider.hasSourceAsMap();
} }
/** /**
@ -174,23 +100,7 @@ public class SourceLookup implements Map<String, Object> {
* @return The list of found values or an empty list if none are found * @return The list of found values or an empty list if none are found
*/ */
public List<Object> extractRawValuesWithoutCaching(String path) { public List<Object> extractRawValuesWithoutCaching(String path) {
if (source != null) { return sourceProvider.extractRawValuesWithoutCaching(path);
return XContentMapValues.extractRawValues(path, source);
}
if (sourceAsBytes != null) {
return XContentMapValues.extractRawValues(
path,
XContentHelper.convertToMap(sourceAsBytes, false, null, Set.of(path), null).v2()
);
}
try {
FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
fieldReader.accept(docId, sourceFieldVisitor);
BytesReference source = sourceFieldVisitor.source();
return XContentMapValues.extractRawValues(path, XContentHelper.convertToMap(source, false, null, Set.of(path), null).v2());
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse / load source", e);
}
} }
/** /**
@ -212,6 +122,23 @@ public class SourceLookup implements Map<String, Object> {
return context.getFilter().apply(source()); return context.getFilter().apply(source());
} }
private static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(BytesReference source) throws ElasticsearchParseException {
return XContentHelper.convertToMap(source, false);
}
/**
* Get the source as a {@link Map} of java objects.
* <p>
* Important: This can lose precision on numbers with a decimal point. It
* converts numbers like {@code "n": 1234.567} to a {@code double} which
* only has 52 bits of precision in the mantissa. This will come up most
* frequently when folks write nanosecond precision dates as a decimal
* number.
*/
public static Map<String, Object> sourceAsMap(BytesReference source) throws ElasticsearchParseException {
return sourceAsMapAndType(source).v2();
}
@Override @Override
public Object get(Object key) { public Object get(Object key) {
return source().get(key); return source().get(key);
@ -272,4 +199,255 @@ public class SourceLookup implements Map<String, Object> {
public void clear() { public void clear() {
throw new UnsupportedOperationException(); throw new UnsupportedOperationException();
} }
/**
* SourceProvider describes how SourceLookup will access the source.
*/
public interface SourceProvider {
Map<String, Object> source();
XContentType sourceContentType();
BytesReference sourceAsBytes();
List<Object> extractRawValuesWithoutCaching(String path);
boolean hasSourceAsMap();
void setSegmentAndDocument(LeafReaderContext context, int docId);
}
/**
* Null source provider when we expect to be given a real source provider in the future.
* Exceptions expected to be primarily seen by scripts attempting to access synthetic source.
*/
public static class NullSourceProvider implements SourceProvider {
@Override
public Map<String, Object> source() {
throw new IllegalArgumentException("no source available");
}
@Override
public XContentType sourceContentType() {
throw new IllegalArgumentException("no source available");
}
@Override
public BytesReference sourceAsBytes() {
throw new IllegalArgumentException("no source available");
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
throw new IllegalArgumentException("no source available");
}
@Override
public boolean hasSourceAsMap() {
return false;
}
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
//
}
}
/**
* Provider for source using a given map and optional content type.
*/
public static class MapSourceProvider implements SourceProvider {
private Map<String, Object> source;
private XContentType sourceContentType;
public MapSourceProvider(Map<String, Object> source) {
this.source = source;
}
public MapSourceProvider(Map<String, Object> source, @Nullable XContentType sourceContentType) {
this.source = source;
this.sourceContentType = sourceContentType;
}
@Override
public Map<String, Object> source() {
return source;
}
@Override
public XContentType sourceContentType() {
return sourceContentType;
}
@Override
public BytesReference sourceAsBytes() {
throw new UnsupportedOperationException("source as bytes unavailable - empty or already parsed to map");
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
return XContentMapValues.extractRawValues(path, source);
}
@Override
public boolean hasSourceAsMap() {
return true;
}
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
//
}
}
/**
* Provider for source using given source bytes.
*/
public static class BytesSourceProvider implements SourceProvider {
private BytesReference sourceAsBytes;
private Map<String, Object> source;
private XContentType sourceContentType;
public BytesSourceProvider(BytesReference sourceAsBytes) {
this.sourceAsBytes = sourceAsBytes;
}
void parseSource() {
Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(sourceAsBytes);
this.source = tuple.v2();
this.sourceContentType = tuple.v1();
}
@Override
public Map<String, Object> source() {
if (source == null) {
parseSource();
}
return source;
}
@Override
public XContentType sourceContentType() {
if (source == null) {
parseSource();
}
return sourceContentType;
}
@Override
public BytesReference sourceAsBytes() {
return sourceAsBytes;
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
if (source != null) {
return XContentMapValues.extractRawValues(path, source);
}
return XContentMapValues.extractRawValues(
path,
XContentHelper.convertToMap(sourceAsBytes, false, null, Set.of(path), null).v2()
);
}
@Override
public boolean hasSourceAsMap() {
return source != null;
}
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
//
}
}
/**
* Provider for source using a fields visitor.
*/
public static class ReaderSourceProvider implements SourceProvider {
private LeafReader reader;
private CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader;
private int docId = -1;
private SourceProvider sourceProvider;
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
// if we are called with the same document, don't invalidate source
if (this.reader == context.reader() && this.docId == docId) {
return;
}
// only reset reader and fieldReader when reader changes
if (this.reader != context.reader()) {
this.reader = context.reader();
// All the docs to fetch are adjacent but Lucene stored fields are optimized
// for random access and don't optimize for sequential access - except for merging.
// So we do a little hack here and pretend we're going to do merges in order to
// get better sequential access.
if (context.reader()instanceof SequentialStoredFieldsLeafReader lf) {
// Avoid eagerly loading the stored fields reader, since this can be expensive
Supplier<StoredFieldsReader> supplier = new MemoizedSupplier<>(lf::getSequentialStoredFieldsReader);
fieldReader = (d, v) -> supplier.get().visitDocument(d, v);
} else {
fieldReader = context.reader()::document;
}
}
this.sourceProvider = null;
this.docId = docId;
}
@Override
public Map<String, Object> source() {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.source();
}
private void readSourceBytes() {
try {
FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
fieldReader.accept(docId, sourceFieldVisitor);
BytesReference sourceAsBytes = sourceFieldVisitor.source();
if (sourceAsBytes == null) {
sourceProvider = new MapSourceProvider(Collections.emptyMap());
} else {
sourceProvider = new BytesSourceProvider(sourceAsBytes);
((BytesSourceProvider) sourceProvider).parseSource();
}
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse / load source", e);
}
}
@Override
public XContentType sourceContentType() {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.sourceContentType();
}
@Override
public BytesReference sourceAsBytes() {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.sourceAsBytes();
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.extractRawValuesWithoutCaching(path);
}
@Override
public boolean hasSourceAsMap() {
return sourceProvider != null && sourceProvider.hasSourceAsMap();
}
}
} }
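
The sourceAsMap Javadoc retained above warns that parsing the source into java objects can lose precision, since decimal values become doubles with a 52-bit mantissa. A tiny plain-JDK illustration of that caveat (no Elasticsearch parsing involved):

public class DoublePrecisionSketch {
    public static void main(String[] args) {
        // An epoch-millis timestamp carrying sub-millisecond precision as decimal digits.
        String value = "1420070400000.123456";
        double asDouble = Double.parseDouble(value);
        // Doubles near 1.4e12 are spaced roughly 2.4e-4 apart, so the trailing
        // digits cannot survive the round trip through a double.
        System.out.println(asDouble);
        System.out.println(Math.ulp(asDouble));
    }
}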

View file

@ -40,6 +40,7 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptCompiler; import org.elasticsearch.script.ScriptCompiler;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
@ -135,7 +136,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
searchLookupSetOnce.set(fdc.lookupSupplier()); searchLookupSetOnce.set(fdc.lookupSupplier());
return (IndexFieldData.Builder) (cache, breakerService) -> null; return (IndexFieldData.Builder) (cache, breakerService) -> null;
}); });
SearchLookup searchLookup = new SearchLookup(null, null); SearchLookup searchLookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
ifdService.getForField(ft, new FieldDataContext("qualified", () -> searchLookup, null, MappedFieldType.FielddataOperation.SEARCH)); ifdService.getForField(ft, new FieldDataContext("qualified", () -> searchLookup, null, MappedFieldType.FielddataOperation.SEARCH));
assertSame(searchLookup, searchLookupSetOnce.get().get()); assertSame(searchLookup, searchLookupSetOnce.get().get());
} }

View file

@ -29,6 +29,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.StringFieldScript; import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentFactory;
@ -203,6 +204,14 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC
} }
protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries, MappedFieldType mappedFieldType) { protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries, MappedFieldType mappedFieldType) {
return mockContext(allowExpensiveQueries, mappedFieldType, new SourceLookup.ReaderSourceProvider());
}
protected static SearchExecutionContext mockContext(
boolean allowExpensiveQueries,
MappedFieldType mappedFieldType,
SourceLookup.SourceProvider sourceProvider
) {
SearchExecutionContext context = mock(SearchExecutionContext.class); SearchExecutionContext context = mock(SearchExecutionContext.class);
if (mappedFieldType != null) { if (mappedFieldType != null) {
when(context.getFieldType(anyString())).thenReturn(mappedFieldType); when(context.getFieldType(anyString())).thenReturn(mappedFieldType);
@ -211,7 +220,8 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC
SearchLookup lookup = new SearchLookup( SearchLookup lookup = new SearchLookup(
context::getFieldType, context::getFieldType,
(mft, lookupSupplier, fdo) -> mft.fielddataBuilder(new FieldDataContext("test", lookupSupplier, context::sourcePath, fdo)) (mft, lookupSupplier, fdo) -> mft.fielddataBuilder(new FieldDataContext("test", lookupSupplier, context::sourcePath, fdo))
.build(null, null) .build(null, null),
sourceProvider
); );
when(context.lookup()).thenReturn(lookup); when(context.lookup()).thenReturn(lookup);
when(context.getForField(any(), any())).then(args -> { when(context.getForField(any(), any())).then(args -> {

View file

@ -17,6 +17,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.BooleanFieldScript; import org.elasticsearch.script.BooleanFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
@ -57,7 +58,7 @@ public class BooleanFieldScriptTests extends FieldScriptTestCase<BooleanFieldScr
BooleanFieldScript script = new BooleanFieldScript( BooleanFieldScript script = new BooleanFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override

View file

@ -422,7 +422,7 @@ public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeT
case "read_foo" -> (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) { case "read_foo" -> (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit((Boolean) foo); emit((Boolean) foo);
} }
} }
@ -430,7 +430,7 @@ public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeT
case "xor_param" -> (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) { case "xor_param" -> (fieldName, params, lookup) -> (ctx) -> new BooleanFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit((Boolean) foo ^ ((Boolean) getParams().get("param"))); emit((Boolean) foo ^ ((Boolean) getParams().get("param")));
} }
} }

View file

@ -16,6 +16,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
@ -336,7 +337,8 @@ public class CompositeRuntimeFieldTests extends MapperServiceTestCase {
mapperService::fieldType, mapperService::fieldType,
(mft, lookupSupplier, fdo) -> mft.fielddataBuilder( (mft, lookupSupplier, fdo) -> mft.fielddataBuilder(
new FieldDataContext("test", lookupSupplier, mapperService.mappingLookup()::sourcePaths, fdo) new FieldDataContext("test", lookupSupplier, mapperService.mappingLookup()::sourcePaths, fdo)
).build(null, null) ).build(null, null),
new SourceLookup.ReaderSourceProvider()
); );
LeafSearchLookup leafSearchLookup = searchLookup.getLeafSearchLookup(reader.leaves().get(0)); LeafSearchLookup leafSearchLookup = searchLookup.getLeafSearchLookup(reader.leaves().get(0));

View file

@ -19,6 +19,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.DateFieldScript; import org.elasticsearch.script.DateFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.json.JsonXContent;
@ -66,7 +67,7 @@ public class DateFieldScriptTests extends FieldScriptTestCase<DateFieldScript.Fa
DateFieldScript script = new DateFieldScript( DateFieldScript script = new DateFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(randomLocale(random())), DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(randomLocale(random())),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@ -102,7 +103,7 @@ public class DateFieldScriptTests extends FieldScriptTestCase<DateFieldScript.Fa
DateFieldScript.LeafFactory leafFactory = fromSource().newFactory( DateFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field", "field",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
DateFormatter.forPattern("epoch_millis") DateFormatter.forPattern("epoch_millis")
); );
DateFieldScript dateFieldScript = leafFactory.newInstance(reader.leaves().get(0)); DateFieldScript dateFieldScript = leafFactory.newInstance(reader.leaves().get(0));

View file

@ -489,7 +489,7 @@ public class DateScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
) { ) {
@Override @Override
public void execute() { public void execute() {
for (Object timestamp : (List<?>) lookup.source().get("timestamp")) { for (Object timestamp : (List<?>) lookup.source().source().get("timestamp")) {
Parse parse = new Parse(this); Parse parse = new Parse(this);
emit(parse.parse(timestamp)); emit(parse.parse(timestamp));
} }
@ -504,7 +504,7 @@ public class DateScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
) { ) {
@Override @Override
public void execute() { public void execute() {
for (Object timestamp : (List<?>) lookup.source().get("timestamp")) { for (Object timestamp : (List<?>) lookup.source().source().get("timestamp")) {
long epoch = (Long) timestamp; long epoch = (Long) timestamp;
ZonedDateTime dt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(epoch), ZoneId.of("UTC")); ZonedDateTime dt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(epoch), ZoneId.of("UTC"));
dt = dt.plus(((Number) params.get("days")).longValue(), ChronoUnit.DAYS); dt = dt.plus(((Number) params.get("days")).longValue(), ChronoUnit.DAYS);

View file

@ -18,6 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.DoubleFieldScript; import org.elasticsearch.script.DoubleFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.json.JsonXContent;
@ -64,7 +65,7 @@ public class DoubleFieldScriptTests extends FieldScriptTestCase<DoubleFieldScrip
DoubleFieldScript script = new DoubleFieldScript( DoubleFieldScript script = new DoubleFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -99,7 +100,7 @@ public class DoubleFieldScriptTests extends FieldScriptTestCase<DoubleFieldScrip
DoubleFieldScript.LeafFactory leafFactory = fromSource().newFactory( DoubleFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field", "field",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null) new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider())
); );
DoubleFieldScript doubleFieldScript = leafFactory.newInstance(reader.leaves().get(0)); DoubleFieldScript doubleFieldScript = leafFactory.newInstance(reader.leaves().get(0));
List<Double> results = new ArrayList<>(); List<Double> results = new ArrayList<>();

View file

@ -245,7 +245,7 @@ public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTe
case "read_foo" -> (fieldName, params, lookup) -> (ctx) -> new DoubleFieldScript(fieldName, params, lookup, ctx) { case "read_foo" -> (fieldName, params, lookup) -> (ctx) -> new DoubleFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(((Number) foo).doubleValue()); emit(((Number) foo).doubleValue());
} }
} }
@ -253,7 +253,7 @@ public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTe
case "add_param" -> (fieldName, params, lookup) -> (ctx) -> new DoubleFieldScript(fieldName, params, lookup, ctx) { case "add_param" -> (fieldName, params, lookup) -> (ctx) -> new DoubleFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(((Number) foo).doubleValue() + ((Number) getParams().get("param")).doubleValue()); emit(((Number) foo).doubleValue() + ((Number) getParams().get("param")).doubleValue());
} }
} }

View file

@ -17,6 +17,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.GeoPointFieldScript; import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
@ -59,7 +60,7 @@ public class GeoPointFieldScriptTests extends FieldScriptTestCase<GeoPointFieldS
GeoPointFieldScript script = new GeoPointFieldScript( GeoPointFieldScript script = new GeoPointFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override

View file

@ -235,7 +235,7 @@ public class GeoPointScriptFieldTypeTests extends AbstractNonTextScriptFieldType
case "fromLatLon" -> (fieldName, params, lookup) -> (ctx) -> new GeoPointFieldScript(fieldName, params, lookup, ctx) { case "fromLatLon" -> (fieldName, params, lookup) -> (ctx) -> new GeoPointFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
Map<?, ?> foo = (Map<?, ?>) lookup.source().get("foo"); Map<?, ?> foo = (Map<?, ?>) lookup.source().source().get("foo");
emit(((Number) foo.get("lat")).doubleValue(), ((Number) foo.get("lon")).doubleValue()); emit(((Number) foo.get("lat")).doubleValue(), ((Number) foo.get("lon")).doubleValue());
} }
}; };

View file

@ -14,6 +14,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -52,7 +53,8 @@ public class IgnoredFieldMapperTests extends MapperServiceTestCase {
iw -> { iw -> {
SearchLookup lookup = new SearchLookup( SearchLookup lookup = new SearchLookup(
mapperService::fieldType, mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths) fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
); );
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup); when(searchExecutionContext.lookup()).thenReturn(lookup);

View file

@ -12,6 +12,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
@ -47,7 +48,11 @@ public class IndexFieldMapperTests extends MapperServiceTestCase {
iw.addDocument(mapperService.documentMapper().parse(source).rootDoc()); iw.addDocument(mapperService.documentMapper().parse(source).rootDoc());
}, iw -> { }, iw -> {
IndexFieldMapper.IndexFieldType ft = (IndexFieldMapper.IndexFieldType) mapperService.fieldType("_index"); IndexFieldMapper.IndexFieldType ft = (IndexFieldMapper.IndexFieldType) mapperService.fieldType("_index");
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService.mappingLookup()::sourcePaths)); SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw); IndexSearcher searcher = newSearcher(iw);

View file

@ -18,6 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.IpFieldScript; import org.elasticsearch.script.IpFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.json.JsonXContent;
@ -65,7 +66,7 @@ public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factor
IpFieldScript script = new IpFieldScript( IpFieldScript script = new IpFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -100,7 +101,7 @@ public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factor
IpFieldScript.LeafFactory leafFactory = fromSource().newFactory( IpFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field", "field",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null) new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider())
); );
IpFieldScript ipFieldScript = leafFactory.newInstance(reader.leaves().get(0)); IpFieldScript ipFieldScript = leafFactory.newInstance(reader.leaves().get(0));
List<InetAddress> results = new ArrayList<>(); List<InetAddress> results = new ArrayList<>();


@ -262,7 +262,7 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase {
case "read_foo" -> (fieldName, params, lookup) -> (ctx) -> new IpFieldScript(fieldName, params, lookup, ctx) { case "read_foo" -> (fieldName, params, lookup) -> (ctx) -> new IpFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(foo.toString()); emit(foo.toString());
} }
} }
@ -270,7 +270,7 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase {
case "append_param" -> (fieldName, params, lookup) -> (ctx) -> new IpFieldScript(fieldName, params, lookup, ctx) { case "append_param" -> (fieldName, params, lookup) -> (ctx) -> new IpFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(foo.toString() + getParams().get("param")); emit(foo.toString() + getParams().get("param"));
} }
} }


@ -40,6 +40,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.StringFieldScript; import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -292,6 +293,23 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase
} }
} }
public void testSyntheticSourceAccess() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-suffix"));
expectThrows(
IllegalArgumentException.class,
() -> {
searcher.count(fieldType.termQuery("1-suffix", mockContext(true, null, new SourceLookup.NullSourceProvider())));
}
);
}
}
}
@Override @Override
protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) { protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) {
return ft.termQuery(randomAlphaOfLengthBetween(1, 1000), ctx); return ft.termQuery(randomAlphaOfLengthBetween(1, 1000), ctx);
@ -389,7 +407,7 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase
case "read_foo" -> (fieldName, params, lookup) -> ctx -> new StringFieldScript(fieldName, params, lookup, ctx) { case "read_foo" -> (fieldName, params, lookup) -> ctx -> new StringFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(foo.toString()); emit(foo.toString());
} }
} }
@ -397,7 +415,7 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase
case "append_param" -> (fieldName, params, lookup) -> ctx -> new StringFieldScript(fieldName, params, lookup, ctx) { case "append_param" -> (fieldName, params, lookup) -> ctx -> new StringFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(foo.toString() + getParams().get("param").toString()); emit(foo.toString() + getParams().get("param").toString());
} }
} }


@ -18,6 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.LongFieldScript; import org.elasticsearch.script.LongFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.json.JsonXContent;
@ -64,7 +65,7 @@ public class LongFieldScriptTests extends FieldScriptTestCase<LongFieldScript.Fa
LongFieldScript script = new LongFieldScript( LongFieldScript script = new LongFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -99,7 +100,7 @@ public class LongFieldScriptTests extends FieldScriptTestCase<LongFieldScript.Fa
LongFieldScript.LeafFactory leafFactory = fromSource().newFactory( LongFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field", "field",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null) new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider())
); );
LongFieldScript longFieldScript = leafFactory.newInstance(reader.leaves().get(0)); LongFieldScript longFieldScript = leafFactory.newInstance(reader.leaves().get(0));
List<Long> results = new ArrayList<>(); List<Long> results = new ArrayList<>();


@ -278,7 +278,7 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) { return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(((Number) foo).longValue()); emit(((Number) foo).longValue());
} }
} }
@ -287,7 +287,7 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) { return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object foo : (List<?>) lookup.source().get("foo")) { for (Object foo : (List<?>) lookup.source().source().get("foo")) {
emit(((Number) foo).longValue() + ((Number) getParams().get("param")).longValue()); emit(((Number) foo).longValue() + ((Number) getParams().get("param")).longValue());
} }
} }
@ -298,7 +298,7 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) { return (fieldName, params, lookup) -> (ctx) -> new LongFieldScript(fieldName, params, lookup, ctx) {
@Override @Override
public void execute() { public void execute() {
for (Object timestamp : (List<?>) lookup.source().get("timestamp")) { for (Object timestamp : (List<?>) lookup.source().source().get("timestamp")) {
emit(now - ((Number) timestamp).longValue()); emit(now - ((Number) timestamp).longValue());
} }
} }


@ -55,8 +55,7 @@ public class LookupRuntimeFieldTypeTests extends MapperServiceTestCase {
"""; """;
var mapperService = createMapperService(mapping); var mapperService = createMapperService(mapping);
XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("foo", List.of("f1", "f2")).endObject(); XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("foo", List.of("f1", "f2")).endObject();
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.BytesSourceProvider(BytesReference.bytes(source)));
sourceLookup.setSource(BytesReference.bytes(source));
MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field");
ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null); ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null);
DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", sourceLookup); DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", sourceLookup);
@ -111,8 +110,7 @@ public class LookupRuntimeFieldTypeTests extends MapperServiceTestCase {
source.field("foo", List.of()); source.field("foo", List.of());
} }
source.endObject(); source.endObject();
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.BytesSourceProvider(BytesReference.bytes(source)));
sourceLookup.setSource(BytesReference.bytes(source));
MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field"); MappedFieldType fieldType = mapperService.fieldType("foo_lookup_field");
ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null); ValueFetcher valueFetcher = fieldType.valueFetcher(createSearchExecutionContext(mapperService), null);
DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", sourceLookup); DocumentField doc = valueFetcher.fetchDocumentField("foo_lookup_field", sourceLookup);


@ -18,6 +18,7 @@ import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.fetch.subphase.FieldAndFormat; import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.FieldFetcher; import org.elasticsearch.search.fetch.subphase.FieldFetcher;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import java.util.Collections; import java.util.Collections;
@ -61,7 +62,11 @@ public class PlaceHolderFieldMapperTests extends MapperServiceTestCase {
.rootDoc() .rootDoc()
); );
}, iw -> { }, iw -> {
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService.mappingLookup()::sourcePaths)); SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
FieldFetcher fieldFetcher = FieldFetcher.create( FieldFetcher fieldFetcher = FieldFetcher.create(
searchExecutionContext, searchExecutionContext,


@ -18,6 +18,7 @@ import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -77,7 +78,8 @@ public class ProvidedIdFieldMapperTests extends MapperServiceTestCase {
iw -> { iw -> {
SearchLookup lookup = new SearchLookup( SearchLookup lookup = new SearchLookup(
mapperService::fieldType, mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths) fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
); );
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup); when(searchExecutionContext.lookup()).thenReturn(lookup);


@ -13,6 +13,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.XContentType;
@ -70,7 +71,8 @@ public class RoutingFieldMapperTests extends MetadataMapperTestCase {
iw -> { iw -> {
SearchLookup lookup = new SearchLookup( SearchLookup lookup = new SearchLookup(
mapperService::fieldType, mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths) fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
); );
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup); when(searchExecutionContext.lookup()).thenReturn(lookup);


@ -18,6 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.StringFieldScript; import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.json.JsonXContent;
@ -63,7 +64,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript script = new StringFieldScript( StringFieldScript script = new StringFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -89,7 +90,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript script = new StringFieldScript( StringFieldScript script = new StringFieldScript(
"test", "test",
Map.of(), Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null), new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -129,7 +130,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript.LeafFactory leafFactory = fromSource().newFactory( StringFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field", "field",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null) new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider())
); );
StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0)); StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0));
List<String> results = stringFieldScript.resultsForDoc(0); List<String> results = stringFieldScript.resultsForDoc(0);
@ -158,7 +159,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript.LeafFactory leafFactory = fromSource().newFactory( StringFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field", "field",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null) new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider())
); );
StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0)); StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0));
List<String> results = stringFieldScript.resultsForDoc(0); List<String> results = stringFieldScript.resultsForDoc(0);


@ -15,6 +15,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Collections;
@ -50,7 +51,11 @@ public class VersionFieldMapperTests extends MapperServiceTestCase {
iw.addDocument(parsedDoc.rootDoc()); iw.addDocument(parsedDoc.rootDoc());
}, iw -> { }, iw -> {
VersionFieldMapper.VersionFieldType ft = (VersionFieldMapper.VersionFieldType) mapperService.fieldType("_version"); VersionFieldMapper.VersionFieldType ft = (VersionFieldMapper.VersionFieldType) mapperService.fieldType("_version");
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService.mappingLookup()::sourcePaths)); SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService); SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null); ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw); IndexSearcher searcher = newSearcher(iw);


@ -180,11 +180,10 @@ public class KeyedFlattenedFieldTypeTests extends FieldTypeTestCase {
when(searchExecutionContext.sourcePath("field.key")).thenReturn(Set.of("field.key")); when(searchExecutionContext.sourcePath("field.key")).thenReturn(Set.of("field.key"));
ValueFetcher fetcher = ft.valueFetcher(searchExecutionContext, null); ValueFetcher fetcher = ft.valueFetcher(searchExecutionContext, null);
SourceLookup lookup = new SourceLookup(); SourceLookup lookup = new SourceLookup(new SourceLookup.MapSourceProvider(Collections.singletonMap("field", sourceValue)));
lookup.setSource(Collections.singletonMap("field", sourceValue));
assertEquals(List.of("value"), fetcher.fetchValues(lookup, new ArrayList<Object>())); assertEquals(List.of("value"), fetcher.fetchValues(lookup, new ArrayList<Object>()));
lookup.setSource(Collections.singletonMap("field", null)); lookup.setSourceProvider(new SourceLookup.MapSourceProvider(Collections.singletonMap("field", null)));
assertEquals(List.of(), fetcher.fetchValues(lookup, new ArrayList<Object>())); assertEquals(List.of(), fetcher.fetchValues(lookup, new ArrayList<Object>()));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.valueFetcher(searchExecutionContext, "format")); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ft.valueFetcher(searchExecutionContext, "format"));


@ -14,6 +14,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -31,7 +32,7 @@ public class CompositeFieldScriptTests extends ESTestCase {
CompositeFieldScript script = new CompositeFieldScript( CompositeFieldScript script = new CompositeFieldScript(
"composite", "composite",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null), new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -62,7 +63,7 @@ public class CompositeFieldScriptTests extends ESTestCase {
CompositeFieldScript script = new CompositeFieldScript( CompositeFieldScript script = new CompositeFieldScript(
"composite", "composite",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null), new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override
@ -75,7 +76,7 @@ public class CompositeFieldScriptTests extends ESTestCase {
StringFieldScript stringFieldScript = new StringFieldScript( StringFieldScript stringFieldScript = new StringFieldScript(
"composite.leaf", "composite.leaf",
Collections.emptyMap(), Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null), new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
) { ) {
@Override @Override


@ -18,6 +18,7 @@ import org.apache.lucene.tests.index.RandomIndexWriter;
import org.elasticsearch.script.AbstractFieldScript; import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.SortedNumericDocValuesLongFieldScript; import org.elasticsearch.script.SortedNumericDocValuesLongFieldScript;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -39,7 +40,7 @@ public class SortedNumericDocValuesLongFieldScriptTests extends ESTestCase {
try (DirectoryReader reader = iw.getReader()) { try (DirectoryReader reader = iw.getReader()) {
SortedNumericDocValuesLongFieldScript docValues = new SortedNumericDocValuesLongFieldScript( SortedNumericDocValuesLongFieldScript docValues = new SortedNumericDocValuesLongFieldScript(
"test", "test",
new SearchLookup(field -> null, (ft, lookup, ftd) -> null), new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
); );
List<Long> values = new ArrayList<>(); List<Long> values = new ArrayList<>();


@ -19,6 +19,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.AbstractFieldScript; import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.SortedSetDocValuesStringFieldScript; import org.elasticsearch.script.SortedSetDocValuesStringFieldScript;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -40,7 +41,7 @@ public class SortedSetDocValuesStringFieldScriptTests extends ESTestCase {
try (DirectoryReader reader = iw.getReader()) { try (DirectoryReader reader = iw.getReader()) {
SortedSetDocValuesStringFieldScript docValues = new SortedSetDocValuesStringFieldScript( SortedSetDocValuesStringFieldScript docValues = new SortedSetDocValuesStringFieldScript(
"test", "test",
new SearchLookup(field -> null, (ft, lookup, ftd) -> null), new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
reader.leaves().get(0) reader.leaves().get(0)
); );
List<String> values = new ArrayList<>(); List<String> values = new ArrayList<>();


@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.CardinalityUpperBound; import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper; import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.runtime.StringScriptFieldTermQuery; import org.elasticsearch.search.runtime.StringScriptFieldTermQuery;
import java.io.IOException; import java.io.IOException;
@ -635,7 +636,7 @@ public class RangeAggregatorTests extends AggregatorTestCase {
*/ */
public void testRuntimeFieldTopLevelQueryNotOptimized() throws IOException { public void testRuntimeFieldTopLevelQueryNotOptimized() throws IOException {
long totalDocs = (long) RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 4; long totalDocs = (long) RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 4;
SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null); SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null, new SourceLookup.ReaderSourceProvider());
StringFieldScript.LeafFactory scriptFactory = ctx -> new StringFieldScript("dummy", Map.of(), lookup, ctx) { StringFieldScript.LeafFactory scriptFactory = ctx -> new StringFieldScript("dummy", Map.of(), lookup, ctx) {
@Override @Override
public void execute() { public void execute() {


@ -111,6 +111,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType; import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.runtime.StringScriptFieldTermQuery; import org.elasticsearch.search.runtime.StringScriptFieldTermQuery;
import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder; import org.elasticsearch.search.sort.ScoreSortBuilder;
@ -2044,7 +2045,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
*/ */
public void testRuntimeFieldTopLevelNotOptimized() throws IOException { public void testRuntimeFieldTopLevelNotOptimized() throws IOException {
long totalDocs = 500; long totalDocs = 500;
SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null); SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null, new SourceLookup.ReaderSourceProvider());
StringFieldScript.LeafFactory scriptFactory = ctx -> new StringFieldScript("dummy", Map.of(), lookup, ctx) { StringFieldScript.LeafFactory scriptFactory = ctx -> new StringFieldScript("dummy", Map.of(), lookup, ctx) {
@Override @Override
public void execute() { public void execute() {


@ -19,6 +19,7 @@ import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -36,7 +37,7 @@ public class ScriptValuesTests extends ESTestCase {
int index; int index;
FakeAggregationScript(Object[][] values) { FakeAggregationScript(Object[][] values) {
super(Collections.emptyMap(), new SearchLookup(null, null) { super(Collections.emptyMap(), new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider()) {
@Override @Override
public LeafSearchLookup getLeafSearchLookup(LeafReaderContext context) { public LeafSearchLookup getLeafSearchLookup(LeafReaderContext context) {


@ -17,6 +17,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchContext; import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext; import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor; import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentFactory;
@ -175,7 +176,10 @@ public class FetchSourcePhaseTests extends ESTestCase {
MemoryIndex index = new MemoryIndex(); MemoryIndex index = new MemoryIndex();
LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0); LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);
HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1); HitContext hitContext = new HitContext(searchHit, leafReaderContext, 1);
hitContext.sourceLookup().setSource(source == null ? null : BytesReference.bytes(source)); hitContext.sourceLookup()
.setSourceProvider(
source == null ? new SourceLookup.NullSourceProvider() : new SourceLookup.BytesSourceProvider(BytesReference.bytes(source))
);
FetchSourcePhase phase = new FetchSourcePhase(); FetchSourcePhase phase = new FetchSourcePhase();
FetchSubPhaseProcessor processor = phase.getProcessor(fetchContext); FetchSubPhaseProcessor processor = phase.getProcessor(fetchContext);


@ -244,7 +244,7 @@ public class FieldFetcherTests extends MapperServiceTestCase {
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0); LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext); fieldFetcher.setNextReader(readerContext);
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(readerContext, 0); sourceLookup.setSegmentAndDocument(readerContext, 0);
Map<String, DocumentField> fetchedFields = fieldFetcher.fetch(sourceLookup); Map<String, DocumentField> fetchedFields = fieldFetcher.fetch(sourceLookup);
@ -284,8 +284,7 @@ public class FieldFetcherTests extends MapperServiceTestCase {
public void testEmptyFetch() throws IOException { public void testEmptyFetch() throws IOException {
MapperService mapperService = createMapperService(); MapperService mapperService = createMapperService();
XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject(); XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("field", "value").endObject();
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.BytesSourceProvider(BytesReference.bytes(source)));
sourceLookup.setSource(BytesReference.bytes(source));
{ {
// make sure that an empty fetch don't deserialize the document // make sure that an empty fetch don't deserialize the document
FieldFetcher fieldFetcher = FieldFetcher.create(newSearchExecutionContext(mapperService), List.of()); FieldFetcher fieldFetcher = FieldFetcher.create(newSearchExecutionContext(mapperService), List.of());
@ -1132,7 +1131,7 @@ public class FieldFetcherTests extends MapperServiceTestCase {
IndexSearcher searcher = newSearcher(iw); IndexSearcher searcher = newSearcher(iw);
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0); LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext); fieldFetcher.setNextReader(readerContext);
Map<String, DocumentField> fields = fieldFetcher.fetch(new SourceLookup()); Map<String, DocumentField> fields = fieldFetcher.fetch(new SourceLookup(new SourceLookup.ReaderSourceProvider()));
assertEquals(1, fields.size()); assertEquals(1, fields.size());
DocumentField field = fields.get("runtime_field"); DocumentField field = fields.get("runtime_field");
assertEquals(1L, (long) field.getValue()); assertEquals(1L, (long) field.getValue());
@ -1164,7 +1163,7 @@ public class FieldFetcherTests extends MapperServiceTestCase {
IndexSearcher searcher = newSearcher(iw); IndexSearcher searcher = newSearcher(iw);
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0); LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext); fieldFetcher.setNextReader(readerContext);
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(readerContext, 0); sourceLookup.setSegmentAndDocument(readerContext, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(sourceLookup); Map<String, DocumentField> fields = fieldFetcher.fetch(sourceLookup);
assertEquals(1, fields.size()); assertEquals(1, fields.size());
@ -1186,8 +1185,9 @@ public class FieldFetcherTests extends MapperServiceTestCase {
throws IOException { throws IOException {
SourceLookup sourceLookup = null; SourceLookup sourceLookup = null;
if (source != null) { if (source != null) {
sourceLookup = new SourceLookup(); sourceLookup = new SourceLookup(new SourceLookup.BytesSourceProvider(BytesReference.bytes(source)));
sourceLookup.setSource(BytesReference.bytes(source)); } else {
sourceLookup = new SourceLookup(new SourceLookup.MapSourceProvider(Collections.emptyMap()));
} }
FieldFetcher fieldFetcher = FieldFetcher.create(newSearchExecutionContext(mapperService), fields); FieldFetcher fieldFetcher = FieldFetcher.create(newSearchExecutionContext(mapperService), fields);
return fieldFetcher.fetch(sourceLookup); return fieldFetcher.fetch(sourceLookup);


@ -11,17 +11,17 @@ package org.elasticsearch.search.lookup;
import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField; import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentFactory;
import java.io.IOException; import java.io.IOException;
@ -34,20 +34,19 @@ public class SourceLookupTests extends ESTestCase {
try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) { try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) {
Document doc = new Document(); Document doc = new Document();
doc.add(new StringField("field", "value", Field.Store.YES)); doc.add(new StringField("field", "value", Field.Store.YES));
doc.add(new StoredField("_source", new BytesRef("{\"field\": \"value\"}")));
iw.addDocument(doc); iw.addDocument(doc);
try (IndexReader reader = iw.getReader()) { try (IndexReader reader = iw.getReader()) {
LeafReaderContext readerContext = reader.leaves().get(0); LeafReaderContext readerContext = reader.leaves().get(0);
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(readerContext, 42); sourceLookup.setSegmentAndDocument(readerContext, 0);
sourceLookup.setSource( sourceLookup.source();
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject())
);
assertNotNull(sourceLookup.internalSourceRef()); assertNotNull(sourceLookup.internalSourceRef());
// Source should be preserved if we pass in the same reader and document // Source should be preserved if we pass in the same reader and document
sourceLookup.setSegmentAndDocument(readerContext, 42); sourceLookup.setSegmentAndDocument(readerContext, 0);
assertNotNull(sourceLookup.internalSourceRef()); assertNotNull(sourceLookup.internalSourceRef());
// Check that the stored fields reader is not loaded eagerly // Check that the stored fields reader is not loaded eagerly


@ -28,6 +28,7 @@ import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.LeafSearchLookup; import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
@ -49,7 +50,7 @@ public class ScriptScoreQueryTests extends ESTestCase {
private DirectoryReader reader; private DirectoryReader reader;
private IndexSearcher searcher; private IndexSearcher searcher;
private LeafReaderContext leafReaderContext; private LeafReaderContext leafReaderContext;
private final SearchLookup lookup = new SearchLookup(null, null); private final SearchLookup lookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
@Before @Before
public void initSearcher() throws IOException { public void initSearcher() throws IOException {


@ -25,6 +25,7 @@ import org.elasticsearch.script.AbstractLongFieldScript;
import org.elasticsearch.script.GeoPointFieldScript; import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
@ -82,7 +83,7 @@ public class GeoPointScriptFieldDistanceFeatureQueryTests extends AbstractScript
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [-3.56, -45.98]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [-3.56, -45.98]}"))));
try (DirectoryReader reader = iw.getReader()) { try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader); IndexSearcher searcher = newSearcher(reader);
SearchLookup searchLookup = new SearchLookup(null, null); SearchLookup searchLookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
Function<LeafReaderContext, GeoPointFieldScript> leafFactory = ctx -> new GeoPointFieldScript( Function<LeafReaderContext, GeoPointFieldScript> leafFactory = ctx -> new GeoPointFieldScript(
"test", "test",
Map.of(), Map.of(),
@ -91,7 +92,7 @@ public class GeoPointScriptFieldDistanceFeatureQueryTests extends AbstractScript
) { ) {
@Override @Override
public void execute() { public void execute() {
GeoPoint point = GeoUtils.parseGeoPoint(searchLookup.source().get("location"), true); GeoPoint point = GeoUtils.parseGeoPoint(searchLookup.source().source().get("location"), true);
emit(point.lat(), point.lon()); emit(point.lat(), point.lon());
} }
}; };


@ -21,6 +21,7 @@ import org.elasticsearch.script.AbstractLongFieldScript;
import org.elasticsearch.script.DateFieldScript; import org.elasticsearch.script.DateFieldScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -68,7 +69,7 @@ public class LongScriptFieldDistanceFeatureQueryTests extends AbstractScriptFiel
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}")))); iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}"))));
try (DirectoryReader reader = iw.getReader()) { try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader); IndexSearcher searcher = newSearcher(reader);
SearchLookup searchLookup = new SearchLookup(null, null); SearchLookup searchLookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory = ctx -> new DateFieldScript( Function<LeafReaderContext, AbstractLongFieldScript> leafFactory = ctx -> new DateFieldScript(
"test", "test",
Map.of(), Map.of(),
@ -78,7 +79,7 @@ public class LongScriptFieldDistanceFeatureQueryTests extends AbstractScriptFiel
) { ) {
@Override @Override
public void execute() { public void execute() {
for (Object timestamp : (List<?>) searchLookup.source().get("timestamp")) { for (Object timestamp : (List<?>) searchLookup.source().source().get("timestamp")) {
emit(((Number) timestamp).longValue()); emit(((Number) timestamp).longValue());
} }
} }


@ -55,8 +55,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field));
ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, format); ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, format);
SourceLookup lookup = new SourceLookup(); SourceLookup lookup = new SourceLookup(new SourceLookup.MapSourceProvider(Collections.singletonMap(field, sourceValue)));
lookup.setSource(Collections.singletonMap(field, sourceValue));
return fetcher.fetchValues(lookup, new ArrayList<>()); return fetcher.fetchValues(lookup, new ArrayList<>());
} }
@ -67,8 +66,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field)); when(searchExecutionContext.sourcePath(field)).thenReturn(Set.of(field));
ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, null); ValueFetcher fetcher = fieldType.valueFetcher(searchExecutionContext, null);
SourceLookup lookup = new SourceLookup(); SourceLookup lookup = new SourceLookup(new SourceLookup.MapSourceProvider(Collections.singletonMap(field, List.of(values))));
lookup.setSource(Collections.singletonMap(field, List.of(values)));
return fetcher.fetchValues(lookup, new ArrayList<>()); return fetcher.fetchValues(lookup, new ArrayList<>());
} }
} }


@ -333,7 +333,8 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
iw -> { iw -> {
SearchLookup lookup = new SearchLookup( SearchLookup lookup = new SearchLookup(
mapperService::fieldType, mapperService::fieldType,
fieldDataLookup(mapperService.mappingLookup()::sourcePaths) fieldDataLookup(mapperService.mappingLookup()::sourcePaths),
new SourceLookup.ReaderSourceProvider()
); );
ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft, MappedFieldType.FielddataOperation.SEARCH)); ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft, MappedFieldType.FielddataOperation.SEARCH));
IndexSearcher searcher = newSearcher(iw); IndexSearcher searcher = newSearcher(iw);
@ -605,7 +606,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format); ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source); ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> { withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
SourceLookup sourceLookup = new SourceLookup(); SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(ir.leaves().get(0), 0); sourceLookup.setSegmentAndDocument(ir.leaves().get(0), 0);
docValueFetcher.setNextReader(ir.leaves().get(0)); docValueFetcher.setNextReader(ir.leaves().get(0));
nativeFetcher.setNextReader(ir.leaves().get(0)); nativeFetcher.setNextReader(ir.leaves().get(0));
@ -714,7 +715,11 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
SourceToParse source = source(this::writeField); SourceToParse source = source(this::writeField);
ParsedDocument doc = mapperService.documentMapper().parse(source); ParsedDocument doc = mapperService.documentMapper().parse(source);
SearchLookup lookup = new SearchLookup(f -> fieldType, (f, s, t) -> { throw new UnsupportedOperationException(); }); SearchLookup lookup = new SearchLookup(
f -> fieldType,
(f, s, t) -> { throw new UnsupportedOperationException(); },
new SourceLookup.ReaderSourceProvider()
);
withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> { withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> {


@ -88,7 +88,7 @@ public class DataTierFieldTypeTests extends MapperServiceTestCase {
public void testFetchValue() throws IOException { public void testFetchValue() throws IOException {
MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE; MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE;
SourceLookup lookup = new SourceLookup(); SourceLookup lookup = new SourceLookup(new SourceLookup.NullSourceProvider());
List<Object> ignoredValues = new ArrayList<>(); List<Object> ignoredValues = new ArrayList<>();
ValueFetcher valueFetcher = ft.valueFetcher(createContext(), null); ValueFetcher valueFetcher = ft.valueFetcher(createContext(), null);


@ -27,6 +27,7 @@ import org.elasticsearch.script.DocReader;
import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script; import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType; import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric; import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric;
@ -124,7 +125,8 @@ public class AggregateDoubleMetricFieldTypeTests extends FieldTypeTestCase {
searchExecutionContext::getFieldType, searchExecutionContext::getFieldType,
(mft, lookupSupplier, fdo) -> mft.fielddataBuilder( (mft, lookupSupplier, fdo) -> mft.fielddataBuilder(
new FieldDataContext("test", lookupSupplier, searchExecutionContext::sourcePath, fdo) new FieldDataContext("test", lookupSupplier, searchExecutionContext::sourcePath, fdo)
).build(null, null) ).build(null, null),
new SourceLookup.ReaderSourceProvider()
); );
when(searchExecutionContext.lookup()).thenReturn(lookup); when(searchExecutionContext.lookup()).thenReturn(lookup);
IndexSearcher searcher = newSearcher(reader); IndexSearcher searcher = newSearcher(reader);


@ -111,9 +111,8 @@ public class ConstantKeywordFieldTypeTests extends FieldTypeTestCase {
MappedFieldType fieldType = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("field", null); MappedFieldType fieldType = new ConstantKeywordFieldMapper.ConstantKeywordFieldType("field", null);
ValueFetcher fetcher = fieldType.valueFetcher(null, null); ValueFetcher fetcher = fieldType.valueFetcher(null, null);
SourceLookup missingValueLookup = new SourceLookup(); SourceLookup missingValueLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
SourceLookup nullValueLookup = new SourceLookup(); SourceLookup nullValueLookup = new SourceLookup(new SourceLookup.MapSourceProvider(Collections.singletonMap("field", null)));
nullValueLookup.setSource(Collections.singletonMap("field", null));
List<Object> ignoredValues = new ArrayList<>(); List<Object> ignoredValues = new ArrayList<>();
assertTrue(fetcher.fetchValues(missingValueLookup, ignoredValues).isEmpty()); assertTrue(fetcher.fetchValues(missingValueLookup, ignoredValues).isEmpty());