Replace SourceLookup with SourceProvider interface (#91540)

SourceLookup mixes up several concerns (lazy loading, map-style access for scripts,
different access providers) and duplicates logic, such as choosing how to
apply filtering, that is better handled directly in the Source interface.

This commit removes SourceLookup entirely and replaces it with a new
SourceProvider interface, with a simple stored fields reader implementation.
SearchLookup implements this interface directly, and the fetch phase uses
a custom implementation to provide its separately loaded source to fetch-time
scripts.
Alan Woodward 2023-01-12 16:17:46 +00:00 committed by GitHub
parent 1bb660c810
commit c720cdbbbf
89 changed files with 659 additions and 939 deletions
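For orientation, the shape of the new interface can be sketched from the call sites in the diff below: SourceProvider is a small functional interface that resolves the _source of a single document on demand, and SourceProvider.fromStoredFields() supplies the stored-fields-backed implementation. The sketch is illustrative only; the factory body is a placeholder, not the implementation added by this commit, and the checked IOException is inferred from callers that wrap getSource in try/catch.

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.lookup.Source;

import java.io.IOException;

// Sketch of the new contract: given a leaf reader context and a document id,
// return the Source for that document.
public interface SourceProvider {

    // Callers in the diff wrap this call in try/catch (IOException).
    Source getSource(LeafReaderContext ctx, int doc) throws IOException;

    // Factory used throughout the diff; the real version added by this commit
    // is backed by a stored-fields loader, elided here as a placeholder.
    static SourceProvider fromStoredFields() {
        return (ctx, doc) -> {
            throw new UnsupportedOperationException("illustrative placeholder");
        };
    }
}

Where a real provider is not needed, tests in this change simply pass a lambda such as (ctx, doc) -> Source.empty(XContentType.JSON).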

View file

@ -40,7 +40,7 @@ import org.elasticsearch.script.DocValuesDocReader;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
@ -90,7 +90,7 @@ public class ScriptScoreBenchmark {
private final SearchLookup lookup = new SearchLookup(
fieldTypes::get,
(mft, lookup, fdo) -> mft.fielddataBuilder(FieldDataContext.noRuntimeFields("benchmark")).build(fieldDataCache, breakerService),
new SourceLookup.ReaderSourceProvider()
SourceProvider.fromStoredFields()
);
@Param({ "expression", "metal", "painless_cast", "painless_def" })

View file

@ -15,8 +15,9 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.script.FieldScript;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.text.ParseException;
@ -53,7 +54,7 @@ public class ExpressionFieldScriptTests extends ESTestCase {
lookup = new SearchLookup(
field -> field.equals("field") ? fieldType : null,
(ignored, _lookup, fdt) -> fieldData,
new SourceLookup.ReaderSourceProvider()
(ctx, doc) -> Source.empty(XContentType.JSON)
);
}

View file

@ -17,8 +17,9 @@ import org.elasticsearch.script.DocValuesDocReader;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.text.ParseException;
@ -54,7 +55,7 @@ public class ExpressionNumberSortScriptTests extends ESTestCase {
lookup = new SearchLookup(
field -> field.equals("field") ? fieldType : null,
(ignored, _lookup, fdt) -> fieldData,
new SourceLookup.ReaderSourceProvider()
(ctx, doc) -> Source.empty(XContentType.JSON)
);
}

View file

@ -16,8 +16,9 @@ import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.script.TermsSetQueryScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.text.ParseException;
@ -53,7 +54,7 @@ public class ExpressionTermsSetQueryTests extends ESTestCase {
lookup = new SearchLookup(
field -> field.equals("field") ? fieldType : null,
(ignored, _lookup, fdt) -> fieldData,
new SourceLookup.ReaderSourceProvider()
(ctx, doc) -> Source.empty(XContentType.JSON)
);
}

View file

@ -16,7 +16,8 @@ import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptedMetricAggContexts;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
@ -24,6 +25,7 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -110,9 +112,8 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase {
SearchLookup lookup = mock(SearchLookup.class);
LeafSearchLookup leafLookup = mock(LeafSearchLookup.class);
when(lookup.getLeafSearchLookup(leafReaderContext)).thenReturn(leafLookup);
SourceLookup sourceLookup = mock(SourceLookup.class);
when(leafLookup.asMap()).thenReturn(Collections.singletonMap("_source", sourceLookup));
when(sourceLookup.source()).thenReturn(Collections.singletonMap("test", 1));
Supplier<Source> source = () -> Source.fromMap(Map.of("test", 1), XContentType.JSON);
when(leafLookup.asMap()).thenReturn(Collections.singletonMap("_source", source));
ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, lookup);
ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(leafReaderContext);
@ -141,9 +142,8 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase {
SearchLookup lookup = mock(SearchLookup.class);
LeafSearchLookup leafLookup = mock(LeafSearchLookup.class);
when(lookup.getLeafSearchLookup(leafReaderContext)).thenReturn(leafLookup);
SourceLookup sourceLookup = mock(SourceLookup.class);
when(leafLookup.asMap()).thenReturn(Collections.singletonMap("_source", sourceLookup));
when(sourceLookup.source()).thenReturn(Collections.singletonMap("three", 3));
Supplier<Source> source = () -> Source.fromMap(Map.of("three", 3), XContentType.JSON);
when(leafLookup.asMap()).thenReturn(Collections.singletonMap("_source", source));
ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, lookup);
ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(leafReaderContext);

View file

@ -53,7 +53,7 @@ import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.script.field.TextDocValuesField;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import java.io.IOException;
@ -211,14 +211,13 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
};
};
}
SourceLookup sourceLookup = searchExecutionContext.lookup().source();
ValueFetcher valueFetcher = valueFetcher(searchExecutionContext, null);
SourceProvider sourceProvider = searchExecutionContext.lookup();
return context -> {
valueFetcher.setNextReader(context);
return docID -> {
try {
sourceLookup.setSegmentAndDocument(context, docID);
return valueFetcher.fetchValues(sourceLookup, docID, new ArrayList<>());
return valueFetcher.fetchValues(sourceProvider.getSource(context, docID), docID, new ArrayList<>());
} catch (IOException e) {
throw new UncheckedIOException(e);
}
@ -338,7 +337,7 @@ public class MatchOnlyTextFieldMapper extends FieldMapper {
name(),
CoreValuesSourceType.KEYWORD,
SourceValueFetcher.toString(fieldDataContext.sourcePathsLookup().apply(name())),
fieldDataContext.lookupSupplier().get().source(),
fieldDataContext.lookupSupplier().get(),
TextDocValuesField::new
);
}

View file

@ -310,7 +310,7 @@ public class ScaledFloatFieldMapper extends FieldMapper {
name(),
valuesSourceType,
sourceValueFetcher(sourcePaths),
searchLookup.source(),
searchLookup,
ScaledFloatDocValuesField::new
);
}

View file

@ -28,7 +28,7 @@ import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.lookup.FieldLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
@ -53,6 +53,7 @@ import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.function.Function;
import java.util.function.Supplier;
import static java.util.Collections.singleton;
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
@ -142,7 +143,7 @@ public class SearchFieldsIT extends ESIntegTestCase {
static Object sourceScript(Map<String, Object> vars, String path) {
@SuppressWarnings("unchecked")
Map<String, Object> source = ((SourceLookup) vars.get("_source")).source();
Map<String, Object> source = ((Supplier<Source>) vars.get("_source")).get().source();
return XContentMapValues.extractValue(path, source);
}

View file

@ -25,7 +25,7 @@ import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection;
@ -34,6 +34,7 @@ import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS;
import static org.elasticsearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_SHARDS;
@ -60,9 +61,10 @@ public class SearchStatsIT extends ESIntegTestCase {
public static class CustomScriptPlugin extends MockScriptPlugin {
@Override
@SuppressWarnings("unchecked")
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.singletonMap("_source.field", vars -> {
Map<?, ?> src = ((SourceLookup) vars.get("_source")).source();
Map<?, ?> src = ((Supplier<Source>) vars.get("_source")).get().source();
return src.get("field");
});
}

View file

@ -17,7 +17,7 @@ import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
@ -30,20 +30,20 @@ public abstract class SourceValueFetcherIndexFieldData<T>
protected final String fieldName;
protected final ValuesSourceType valuesSourceType;
protected final ValueFetcher valueFetcher;
protected final SourceLookup sourceLookup;
protected final SourceProvider sourceProvider;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
public Builder(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<T> toScriptFieldFactory
) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
this.toScriptFieldFactory = toScriptFieldFactory;
}
}
@ -51,20 +51,20 @@ public abstract class SourceValueFetcherIndexFieldData<T>
protected final String fieldName;
protected final ValuesSourceType valuesSourceType;
protected final ValueFetcher valueFetcher;
protected final SourceLookup sourceLookup;
protected final SourceProvider sourceProvider;
protected final ToScriptFieldFactory<T> toScriptFieldFactory;
protected SourceValueFetcherIndexFieldData(
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<T> toScriptFieldFactory
) {
this.fieldName = fieldName;
this.valuesSourceType = valuesSourceType;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
this.toScriptFieldFactory = toScriptFieldFactory;
}
@ -112,18 +112,18 @@ public abstract class SourceValueFetcherIndexFieldData<T>
protected final LeafReaderContext leafReaderContext;
protected final ValueFetcher valueFetcher;
protected final SourceLookup sourceLookup;
protected final SourceProvider sourceProvider;
public SourceValueFetcherLeafFieldData(
ToScriptFieldFactory<T> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
this.toScriptFieldFactory = toScriptFieldFactory;
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
}
@Override

View file

@ -15,7 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.Collections;
@ -30,10 +31,10 @@ public class SourceValueFetcherMultiGeoPointIndexFieldData extends SourceValueFe
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<MultiGeoPointValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
@ -42,7 +43,7 @@ public class SourceValueFetcherMultiGeoPointIndexFieldData extends SourceValueFe
fieldName,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
toScriptFieldFactory
);
}
@ -52,15 +53,15 @@ public class SourceValueFetcherMultiGeoPointIndexFieldData extends SourceValueFe
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<MultiGeoPointValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherMultiGeoPointLeafFieldData loadDirect(LeafReaderContext context) throws Exception {
return new SourceValueFetcherMultiGeoPointLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceLookup);
return new SourceValueFetcherMultiGeoPointLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
public static class SourceValueFetcherMultiGeoPointLeafFieldData extends
@ -70,15 +71,15 @@ public class SourceValueFetcherMultiGeoPointIndexFieldData extends SourceValueFe
ToScriptFieldFactory<MultiGeoPointValues> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceLookup);
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceProvider);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return toScriptFieldFactory.getScriptFieldFactory(
new MultiGeoPointValues(new SourceValueFetcherMultiGeoPointDocValues(leafReaderContext, valueFetcher, sourceLookup)),
new MultiGeoPointValues(new SourceValueFetcherMultiGeoPointDocValues(leafReaderContext, valueFetcher, sourceProvider)),
name
);
}
@ -90,18 +91,17 @@ public class SourceValueFetcherMultiGeoPointIndexFieldData extends SourceValueFe
public SourceValueFetcherMultiGeoPointDocValues(
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(leafReaderContext, valueFetcher, sourceLookup);
super(leafReaderContext, valueFetcher, sourceProvider);
}
@Override
@SuppressWarnings("unchecked")
public boolean advanceExact(int doc) throws IOException {
sourceLookup.setSegmentAndDocument(leafReaderContext, doc);
values.clear();
for (Object value : valueFetcher.fetchValues(sourceLookup, doc, Collections.emptyList())) {
Source source = sourceProvider.getSource(leafReaderContext, doc);
for (Object value : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
assert value instanceof Map && ((Map<Object, Object>) value).get("coordinates") instanceof List;
List<Object> coordinates = ((Map<String, List<Object>>) value).get("coordinates");
assert coordinates.size() == 2 && coordinates.get(1) instanceof Number && coordinates.get(0) instanceof Number;

View file

@ -15,7 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.Collections;
@ -31,10 +32,10 @@ public class SourceValueFetcherSortedBinaryIndexFieldData extends SourceValueFet
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedBinaryDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
@ -43,7 +44,7 @@ public class SourceValueFetcherSortedBinaryIndexFieldData extends SourceValueFet
fieldName,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
toScriptFieldFactory
);
}
@ -53,15 +54,15 @@ public class SourceValueFetcherSortedBinaryIndexFieldData extends SourceValueFet
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedBinaryDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherSortedBinaryLeafFieldData loadDirect(LeafReaderContext context) throws Exception {
return new SourceValueFetcherSortedBinaryLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceLookup);
return new SourceValueFetcherSortedBinaryLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
public static class SourceValueFetcherSortedBinaryLeafFieldData extends SourceValueFetcherLeafFieldData<SortedBinaryDocValues> {
@ -70,15 +71,15 @@ public class SourceValueFetcherSortedBinaryIndexFieldData extends SourceValueFet
ToScriptFieldFactory<SortedBinaryDocValues> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceLookup);
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceProvider);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return toScriptFieldFactory.getScriptFieldFactory(
new SourceValueFetcherSortedBinaryDocValues(leafReaderContext, valueFetcher, sourceLookup),
new SourceValueFetcherSortedBinaryDocValues(leafReaderContext, valueFetcher, sourceProvider),
name
);
}
@ -89,7 +90,7 @@ public class SourceValueFetcherSortedBinaryIndexFieldData extends SourceValueFet
private final LeafReaderContext leafReaderContext;
private final ValueFetcher valueFetcher;
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private final SortedSet<BytesRef> values;
private Iterator<BytesRef> iterator;
@ -97,21 +98,20 @@ public class SourceValueFetcherSortedBinaryIndexFieldData extends SourceValueFet
public SourceValueFetcherSortedBinaryDocValues(
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
values = new TreeSet<>();
}
@Override
public boolean advanceExact(int doc) throws IOException {
sourceLookup.setSegmentAndDocument(leafReaderContext, doc);
values.clear();
for (Object object : valueFetcher.fetchValues(sourceLookup, doc, Collections.emptyList())) {
Source source = sourceProvider.getSource(leafReaderContext, doc);
for (Object object : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
values.add(new BytesRef(object.toString()));
}

View file

@ -15,7 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.Collections;
@ -28,10 +29,10 @@ public class SourceValueFetcherSortedBooleanIndexFieldData extends SourceValueFe
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
@ -40,7 +41,7 @@ public class SourceValueFetcherSortedBooleanIndexFieldData extends SourceValueFe
fieldName,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
toScriptFieldFactory
);
}
@ -50,15 +51,15 @@ public class SourceValueFetcherSortedBooleanIndexFieldData extends SourceValueFe
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherLeafFieldData<SortedNumericDocValues> loadDirect(LeafReaderContext context) throws Exception {
return new SourceValueFetcherSortedBooleanLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceLookup);
return new SourceValueFetcherSortedBooleanLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
private static class SourceValueFetcherSortedBooleanLeafFieldData extends SourceValueFetcherLeafFieldData<SortedNumericDocValues> {
@ -67,15 +68,15 @@ public class SourceValueFetcherSortedBooleanIndexFieldData extends SourceValueFe
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceLookup);
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceProvider);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return toScriptFieldFactory.getScriptFieldFactory(
new SourceValueFetcherSortedBooleanDocValues(leafReaderContext, valueFetcher, sourceLookup),
new SourceValueFetcherSortedBooleanDocValues(leafReaderContext, valueFetcher, sourceProvider),
name
);
}
@ -86,7 +87,7 @@ public class SourceValueFetcherSortedBooleanIndexFieldData extends SourceValueFe
private final LeafReaderContext leafReaderContext;
private final ValueFetcher valueFetcher;
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private int trueCount;
private int falseCount;
@ -95,21 +96,19 @@ public class SourceValueFetcherSortedBooleanIndexFieldData extends SourceValueFe
SourceValueFetcherSortedBooleanDocValues(
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
}
@Override
public boolean advanceExact(int doc) throws IOException {
sourceLookup.setSegmentAndDocument(leafReaderContext, doc);
trueCount = 0;
falseCount = 0;
for (Object value : valueFetcher.fetchValues(sourceLookup, doc, Collections.emptyList())) {
Source source = sourceProvider.getSource(leafReaderContext, doc);
for (Object value : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
assert value instanceof Boolean;
if ((Boolean) value) {
++trueCount;

View file

@ -14,7 +14,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.ArrayList;
@ -30,10 +31,10 @@ public class SourceValueFetcherSortedDoubleIndexFieldData extends SourceValueFet
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDoubleValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
@ -42,7 +43,7 @@ public class SourceValueFetcherSortedDoubleIndexFieldData extends SourceValueFet
fieldName,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
toScriptFieldFactory
);
}
@ -52,15 +53,15 @@ public class SourceValueFetcherSortedDoubleIndexFieldData extends SourceValueFet
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDoubleValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherLeafFieldData<SortedNumericDoubleValues> loadDirect(LeafReaderContext context) throws Exception {
return new SourceValueFetcherSortedDoubleLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceLookup);
return new SourceValueFetcherSortedDoubleLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
private static class SourceValueFetcherSortedDoubleLeafFieldData extends SourceValueFetcherLeafFieldData<SortedNumericDoubleValues> {
@ -69,15 +70,15 @@ public class SourceValueFetcherSortedDoubleIndexFieldData extends SourceValueFet
ToScriptFieldFactory<SortedNumericDoubleValues> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceLookup);
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceProvider);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return toScriptFieldFactory.getScriptFieldFactory(
new SourceValueFetcherSortedNumericDoubleValues(leafReaderContext, valueFetcher, sourceLookup),
new SourceValueFetcherSortedNumericDoubleValues(leafReaderContext, valueFetcher, sourceProvider),
name
);
}
@ -88,7 +89,7 @@ public class SourceValueFetcherSortedDoubleIndexFieldData extends SourceValueFet
private final LeafReaderContext leafReaderContext;
private final ValueFetcher valueFetcher;
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private final List<Double> values;
private Iterator<Double> iterator;
@ -96,21 +97,20 @@ public class SourceValueFetcherSortedDoubleIndexFieldData extends SourceValueFet
private SourceValueFetcherSortedNumericDoubleValues(
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
values = new ArrayList<>();
}
@Override
public boolean advanceExact(int doc) throws IOException {
sourceLookup.setSegmentAndDocument(leafReaderContext, doc);
values.clear();
for (Object value : valueFetcher.fetchValues(sourceLookup, doc, Collections.emptyList())) {
Source source = sourceProvider.getSource(leafReaderContext, doc);
for (Object value : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
assert value instanceof Number;
values.add(((Number) value).doubleValue());
}

View file

@ -15,7 +15,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.ArrayList;
@ -31,10 +32,10 @@ public class SourceValueFetcherSortedNumericIndexFieldData extends SourceValueFe
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
@ -43,7 +44,7 @@ public class SourceValueFetcherSortedNumericIndexFieldData extends SourceValueFe
fieldName,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
toScriptFieldFactory
);
}
@ -53,15 +54,15 @@ public class SourceValueFetcherSortedNumericIndexFieldData extends SourceValueFe
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherSortedNumericLeafFieldData loadDirect(LeafReaderContext context) throws Exception {
return new SourceValueFetcherSortedNumericLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceLookup);
return new SourceValueFetcherSortedNumericLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
public static class SourceValueFetcherSortedNumericLeafFieldData extends SourceValueFetcherLeafFieldData<SortedNumericDocValues> {
@ -70,15 +71,15 @@ public class SourceValueFetcherSortedNumericIndexFieldData extends SourceValueFe
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceLookup);
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceProvider);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return toScriptFieldFactory.getScriptFieldFactory(
new SourceValueFetcherSortedNumericDocValues(leafReaderContext, valueFetcher, sourceLookup),
new SourceValueFetcherSortedNumericDocValues(leafReaderContext, valueFetcher, sourceProvider),
name
);
}
@ -89,7 +90,7 @@ public class SourceValueFetcherSortedNumericIndexFieldData extends SourceValueFe
protected final LeafReaderContext leafReaderContext;
protected final ValueFetcher valueFetcher;
protected final SourceLookup sourceLookup;
protected final SourceProvider sourceProvider;
protected final List<Long> values;
protected Iterator<Long> iterator;
@ -97,21 +98,20 @@ public class SourceValueFetcherSortedNumericIndexFieldData extends SourceValueFe
public SourceValueFetcherSortedNumericDocValues(
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
values = new ArrayList<>();
}
@Override
public boolean advanceExact(int doc) throws IOException {
sourceLookup.setSegmentAndDocument(leafReaderContext, doc);
values.clear();
for (Object value : valueFetcher.fetchValues(sourceLookup, doc, Collections.emptyList())) {
Source source = sourceProvider.getSource(leafReaderContext, doc);
for (Object value : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
assert value instanceof Number;
values.add(((Number) value).longValue());
}

View file

@ -60,6 +60,23 @@ public abstract class StoredFieldLoader {
};
}
/**
* Creates a StoredFieldLoader tuned for sequential reads of _source
*/
public static StoredFieldLoader sequentialSource() {
return new StoredFieldLoader() {
@Override
public LeafStoredFieldLoader getLoader(LeafReaderContext ctx, int[] docs) {
return new ReaderStoredFieldLoader(sequentialReader(ctx), true, Set.of());
}
@Override
public List<String> fieldsToLoad() {
return List.of();
}
};
}
/**
* Creates a no-op StoredFieldLoader that will not load any fields from disk
*/
@ -82,7 +99,15 @@ public abstract class StoredFieldLoader {
if (docs == null) {
return leafReader::document;
}
if (leafReader instanceof SequentialStoredFieldsLeafReader lf && docs.length > 10 && hasSequentialDocs(docs)) {
if (docs.length > 10 && hasSequentialDocs(docs)) {
return sequentialReader(ctx);
}
return leafReader::document;
}
private static CheckedBiConsumer<Integer, FieldsVisitor, IOException> sequentialReader(LeafReaderContext ctx) {
LeafReader leafReader = ctx.reader();
if (leafReader instanceof SequentialStoredFieldsLeafReader lf) {
return lf.getSequentialStoredFieldsReader()::visitDocument;
}
return leafReader::document;

View file

@ -24,7 +24,7 @@ import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.rest.action.document.RestMultiGetAction;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.ToXContentObject;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
@ -225,7 +225,7 @@ public class GetResult implements Writeable, Iterable<DocumentField>, ToXContent
return sourceAsMap;
}
sourceAsMap = SourceLookup.sourceAsMap(source);
sourceAsMap = Source.fromBytes(source).source();
return sourceAsMap;
}

View file

@ -280,7 +280,7 @@ public class BooleanFieldMapper extends FieldMapper {
name(),
CoreValuesSourceType.BOOLEAN,
sourceValueFetcher(sourcePaths),
searchLookup.source(),
searchLookup,
BooleanDocValuesField::new
);
}

View file

@ -789,7 +789,7 @@ public final class DateFieldMapper extends FieldMapper {
name(),
resolution.numericType().getValuesSourceType(),
sourceValueFetcher(sourcePaths),
searchLookup.source(),
searchLookup,
resolution.getDefaultToScriptFieldFactory()
);
}

View file

@ -21,7 +21,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
@ -133,7 +133,7 @@ public final class DocumentParser {
(ft, lookup, fto) -> ft.fielddataBuilder(
new FieldDataContext(context.indexSettings().getIndex().getName(), lookup, context.mappingLookup()::sourcePaths, fto)
).build(new IndexFieldDataCache.None(), new NoneCircuitBreakerService()),
new SourceLookup.ReaderSourceProvider()
(ctx, doc) -> Source.fromBytes(context.sourceToParse().source())
);
// field scripts can be called both by the loop at the end of this method and via
// the document reader, so to ensure that we don't run them multiple times we

View file

@ -389,7 +389,7 @@ public class GeoPointFieldMapper extends AbstractPointGeometryFieldMapper<GeoPoi
name(),
CoreValuesSourceType.GEOPOINT,
valueFetcher(sourcePaths, null, null),
searchLookup.source(),
searchLookup,
GeoPointDocValuesField::new
);
}

View file

@ -735,7 +735,7 @@ public final class KeywordFieldMapper extends FieldMapper {
name(),
CoreValuesSourceType.KEYWORD,
sourceValueFetcher(sourcePaths),
fieldDataContext.lookupSupplier().get().source(),
fieldDataContext.lookupSupplier().get(),
KeywordDocValuesField::new
);
}

View file

@ -13,7 +13,6 @@ import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SourceLookup;
import java.util.ArrayList;
import java.util.Collection;
@ -492,18 +491,4 @@ public final class MappingLookup {
throw new MapperParsingException("Field [" + name + "] attempted to shadow a time_series_metric");
}
}
/**
* Returns a SourceProvider describing how to read the source. If using synthetic source, returns the null source provider
* expecting the source to either be provided later by a fetch phase or not be accessed at all (as in scripts).
* @return
*/
public SourceLookup.SourceProvider getSourceProvider() {
SourceFieldMapper sourceMapper = (SourceFieldMapper) getMapper("_source");
if (sourceMapper == null || sourceMapper.isSynthetic() == false) {
return new SourceLookup.ReaderSourceProvider();
} else {
return new SourceLookup.NullSourceProvider();
}
}
}

View file

@ -56,7 +56,7 @@ import org.elasticsearch.search.aggregations.support.TimeSeriesValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.FieldValues;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParser.Token;
@ -413,14 +413,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedDoubleIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
HalfFloatDocValuesField::new
);
}
@ -564,14 +564,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedDoubleIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
FloatDocValuesField::new
);
}
@ -693,14 +693,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedDoubleIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
DoubleDocValuesField::new
);
}
@ -797,14 +797,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedNumericIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
ByteDocValuesField::new
);
}
@ -886,14 +886,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedNumericIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
ShortDocValuesField::new
);
}
@ -1042,14 +1042,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedNumericIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
IntegerDocValuesField::new
);
}
@ -1168,14 +1168,14 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
return new SourceValueFetcherSortedNumericIndexFieldData.Builder(
name,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
LongDocValuesField::new
);
}
@ -1415,7 +1415,7 @@ public class NumberFieldMapper extends FieldMapper {
public IndexFieldData.Builder getValueFetcherFieldDataBuilder(
String name,
ValuesSourceType valuesSourceType,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ValueFetcher valueFetcher
) {
throw new UnsupportedOperationException("not supported for source fallback");
@ -1592,12 +1592,7 @@ public class NumberFieldMapper extends FieldMapper {
if (operation == FielddataOperation.SCRIPT) {
SearchLookup searchLookup = fieldDataContext.lookupSupplier().get();
Set<String> sourcePaths = fieldDataContext.sourcePathsLookup().apply(name());
return type.getValueFetcherFieldDataBuilder(
name(),
valuesSourceType,
searchLookup.source(),
sourceValueFetcher(sourcePaths)
);
return type.getValueFetcherFieldDataBuilder(name(), valuesSourceType, searchLookup, sourceValueFetcher(sourcePaths));
}
throw new IllegalStateException("unknown field data type [" + operation.name() + "]");

View file

@ -101,6 +101,11 @@ public class SourceFieldMapper extends MetadataFieldMapper {
super(Defaults.NAME);
}
public Builder setSynthetic() {
this.mode.setValue(Mode.SYNTHETIC);
return this;
}
@Override
protected Parameter<?>[] getParameters() {
return new Parameter<?>[] { enabled, mode, includes, excludes };

View file

@ -1015,7 +1015,7 @@ public class TextFieldMapper extends FieldMapper {
name(),
CoreValuesSourceType.KEYWORD,
SourceValueFetcher.toString(fieldDataContext.sourcePathsLookup().apply(name())),
fieldDataContext.lookupSupplier().get().source(),
fieldDataContext.lookupSupplier().get(),
TextDocValuesField::new
);
}

View file

@ -59,7 +59,7 @@ import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.NestedDocuments;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.XContentParserConfiguration;
@ -491,9 +491,23 @@ public class SearchExecutionContext extends QueryRewriteContext {
*/
public SearchLookup lookup() {
if (this.lookup == null) {
SourceLookup.SourceProvider sourceProvider = mappingLookup == null
? new SourceLookup.ReaderSourceProvider()
: mappingLookup.getSourceProvider();
SourceProvider sourceProvider = isSourceSynthetic()
? (ctx, doc) -> { throw new IllegalArgumentException("Cannot access source from scripts in synthetic mode"); }
: SourceProvider.fromStoredFields();
setSourceProvider(sourceProvider);
}
return this.lookup;
}
/**
* Replace the standard source provider on the SearchLookup
*
* Note that this will replace the current SearchLookup with a new one, but will not update
* the source provider on previously built lookups. This method should only be called before
* IndexReader access by the current context
*/
public void setSourceProvider(SourceProvider sourceProvider) {
// TODO can we assert that this is only called during FetchPhase?
this.lookup = new SearchLookup(
this::getFieldType,
(fieldType, searchLookup, fielddataOperation) -> indexFieldDataLookup.apply(
@ -503,8 +517,6 @@ public class SearchExecutionContext extends QueryRewriteContext {
sourceProvider
);
}
return this.lookup;
}
public NestedScope nestedScope() {
return nestedScope;

View file

@ -12,13 +12,14 @@ import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.OnScriptError;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.elasticsearch.core.TimeValue.timeValueMillis;
@ -62,13 +63,14 @@ public abstract class AbstractFieldScript extends DocBasedScript {
);
}
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of(
"_source",
value -> ((SourceLookup) value).source()
value -> ((Supplier<Source>) value).get().source()
);
protected final String fieldName;
protected final SourceLookup sourceLookup;
protected final Supplier<Source> source;
private final Map<String, Object> params;
private final OnScriptError onScriptError;
@ -79,13 +81,16 @@ public abstract class AbstractFieldScript extends DocBasedScript {
LeafReaderContext ctx,
OnScriptError onScriptError
) {
super(new DocValuesDocReader(searchLookup, ctx));
this(fieldName, params, new DocValuesDocReader(searchLookup, ctx), onScriptError);
}
private AbstractFieldScript(String fieldName, Map<String, Object> params, DocReader docReader, OnScriptError onScriptError) {
super(docReader);
this.fieldName = fieldName;
Map<String, Object> docAsMap = docAsMap();
this.sourceLookup = (SourceLookup) docAsMap.get("_source");
this.source = docReader.source();
params = new HashMap<>(params);
params.put("_source", sourceLookup);
params.put("_fields", docAsMap.get("_fields"));
params.put("_source", this.source);
params.put("_fields", docReader.docAsMap().get("_fields"));
this.params = new DynamicMap(params, PARAMS_FUNCTIONS);
this.onScriptError = onScriptError;
}
@ -98,7 +103,7 @@ public abstract class AbstractFieldScript extends DocBasedScript {
}
protected List<Object> extractFromSource(String path) {
return XContentMapValues.extractRawValues(path, sourceLookup.source());
return XContentMapValues.extractRawValues(path, source.get().source());
}
protected final void emitFromCompositeScript(CompositeFieldScript compositeFieldScript) {

View file

@ -12,16 +12,18 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
abstract class AbstractSortScript extends DocBasedScript implements ScorerAware {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of("doc", value -> {
deprecationLogger.warn(
DeprecationCategory.SCRIPTING,
@ -36,7 +38,7 @@ abstract class AbstractSortScript extends DocBasedScript implements ScorerAware
"Accessing variable [doc] via [params._doc] from within an sort-script " + "is deprecated in favor of directly accessing [doc]."
);
return value;
}, "_source", value -> ((SourceLookup) value).source());
}, "_source", value -> ((Supplier<Source>) value).get().source());
/**
* The generic runtime parameters for the script.

View file

@ -14,12 +14,13 @@ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
public abstract class AggregationScript extends DocBasedScript implements ScorerAware {
@ -28,6 +29,8 @@ public abstract class AggregationScript extends DocBasedScript implements Scorer
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("aggs", Factory.class);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of("doc", value -> {
deprecationLogger.warn(
DeprecationCategory.SCRIPTING,
@ -44,7 +47,7 @@ public abstract class AggregationScript extends DocBasedScript implements Scorer
+ "is deprecated in favor of directly accessing [doc]."
);
return value;
}, "_source", value -> ((SourceLookup) value).source());
}, "_source", value -> ((Supplier<Source>) value).get().source());
/**
* The generic runtime parameters for the script.
@ -59,9 +62,13 @@ public abstract class AggregationScript extends DocBasedScript implements Scorer
private Object value;
public AggregationScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
super(new DocValuesDocReader(lookup, leafContext));
this(params, new DocValuesDocReader(lookup, leafContext));
}
private AggregationScript(Map<String, Object> params, DocReader docReader) {
super(docReader);
this.params = new DynamicMap(new HashMap<>(params), PARAMS_FUNCTIONS);
this.params.putAll(docAsMap());
this.params.putAll(docReader.docAsMap());
}
protected AggregationScript() {

View file

@ -11,9 +11,12 @@ package org.elasticsearch.script;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.field.EmptyField;
import org.elasticsearch.script.field.Field;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentType;
import java.util.Collections;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Stream;
public abstract class DocBasedScript {
@ -53,6 +56,13 @@ public abstract class DocBasedScript {
return docReader.docAsMap();
}
public Supplier<Source> source() {
if (docReader == null) {
return () -> Source.empty(XContentType.JSON);
}
return docReader.source();
}
/**
* The doc lookup for the Lucene segment this script was created for.
*/

View file

@ -10,8 +10,10 @@ package org.elasticsearch.script;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.script.field.Field;
import org.elasticsearch.search.lookup.Source;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Stream;
/**
@ -36,4 +38,7 @@ public interface DocReader {
/** Old-style doc['field'] access */
Map<String, ScriptDocValues<?>> doc();
/** Helper for source access */
Supplier<Source> source();
}

View file

@ -15,8 +15,10 @@ import org.elasticsearch.script.field.Field;
import org.elasticsearch.search.lookup.LeafDocLookup;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.Source;
import java.util.Map;
import java.util.function.Supplier;
import java.util.stream.Stream;
/**
@ -73,4 +75,9 @@ public class DocValuesDocReader implements DocReader, LeafReaderContextSupplier
public LeafReaderContext getLeafReaderContext() {
return leafReaderContext;
}
@Override
public Supplier<Source> source() {
return leafSearchLookup.source();
}
}

View file

@ -12,12 +12,13 @@ import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* A script to produce dynamic values for return fields.
@ -27,6 +28,8 @@ public abstract class FieldScript extends DocBasedScript {
public static final String[] PARAMETERS = {};
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of("doc", value -> {
deprecationLogger.warn(
DeprecationCategory.SCRIPTING,
@ -42,15 +45,19 @@ public abstract class FieldScript extends DocBasedScript {
+ "is deprecated in favor of directly accessing [doc]."
);
return value;
}, "_source", value -> ((SourceLookup) value).source());
}, "_source", value -> ((Supplier<Source>) value).get().source());
/** The generic runtime parameters for the script. */
private final Map<String, Object> params;
public FieldScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
super(new DocValuesDocReader(lookup, leafContext));
this(params, new DocValuesDocReader(lookup, leafContext));
}
private FieldScript(Map<String, Object> params, DocReader docReader) {
super(docReader);
params = new HashMap<>(params);
params.putAll(docAsMap());
params.putAll(docReader.docAsMap());
this.params = new DynamicMap(params, PARAMS_FUNCTIONS);
}

View file

@ -138,7 +138,7 @@ public abstract class GeoPointFieldScript extends AbstractFieldScript {
@Override
protected List<Object> extractFromSource(String path) {
Object value = XContentMapValues.extractValue(path, sourceLookup.source());
Object value = XContentMapValues.extractValue(path, source.get().source());
if (value instanceof List<?>) {
@SuppressWarnings("unchecked")
List<Object> list = (List<Object>) value;

View file

@ -12,7 +12,7 @@ import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -20,6 +20,7 @@ import java.util.HashMap;
import java.util.Map;
import java.util.function.DoubleSupplier;
import java.util.function.Function;
import java.util.function.Supplier;
/**
* A script used for adjusting the score on a per document basis.
@ -51,6 +52,7 @@ public abstract class ScoreScript extends DocBasedScript {
}
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of("doc", value -> {
deprecationLogger.warn(
DeprecationCategory.SCRIPTING,
@ -66,7 +68,7 @@ public abstract class ScoreScript extends DocBasedScript {
+ "is deprecated in favor of directly accessing [doc]."
);
return value;
}, "_source", value -> ((SourceLookup) value).source());
}, "_source", value -> ((Supplier<Source>) value).get().source());
public static final String[] PARAMETERS = new String[] { "explanation" };

View file

@ -15,13 +15,14 @@ import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
public class ScriptedMetricAggContexts {
@ -55,6 +56,8 @@ public class ScriptedMetricAggContexts {
public abstract static class MapScript extends DocBasedScript {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of("doc", value -> {
deprecationLogger.warn(
DeprecationCategory.SCRIPTING,
@ -79,17 +82,23 @@ public class ScriptedMetricAggContexts {
+ "is deprecated in favor of using [state]."
);
return value;
}, "_source", value -> ((SourceLookup) value).source());
}, "_source", value -> ((Supplier<Source>) value).get().source());
private final Map<String, Object> params;
private final Map<String, Object> state;
private Scorable scorer;
public MapScript(Map<String, Object> params, Map<String, Object> state, SearchLookup lookup, LeafReaderContext leafContext) {
super(leafContext == null ? null : new DocValuesDocReader(lookup, leafContext));
this(params, state, leafContext == null ? null : new DocValuesDocReader(lookup, leafContext));
}
private MapScript(Map<String, Object> params, Map<String, Object> state, DocReader docReader) {
super(docReader);
this.state = state;
params = new HashMap<>(params); // copy params so we aren't modifying input
params.putAll(docAsMap()); // add lookup vars
if (docReader != null) {
params.putAll(docReader.docAsMap()); // add lookup vars
}
this.params = new DynamicMap(params, PARAMS_FUNCTIONS); // wrap with deprecations
}

View file

@ -13,12 +13,13 @@ import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.function.Supplier;
public abstract class TermsSetQueryScript {
@ -27,6 +28,7 @@ public abstract class TermsSetQueryScript {
public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("terms_set", Factory.class);
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
@SuppressWarnings("unchecked")
private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of("doc", value -> {
deprecationLogger.warn(
DeprecationCategory.SCRIPTING,
@ -43,7 +45,7 @@ public abstract class TermsSetQueryScript {
+ "is deprecated in favor of directly accessing [doc]."
);
return value;
}, "_source", value -> ((SourceLookup) value).source());
}, "_source", value -> ((Supplier<Source>) value).get().source());
/**
* The generic runtime parameters for the script.

View file

@ -33,7 +33,6 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.fetch.subphase.LookupField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ObjectParser;
@ -402,7 +401,7 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable<Do
return sourceAsMap;
}
sourceAsMap = SourceLookup.sourceAsMap(source);
sourceAsMap = Source.fromBytes(source).source();
return sourceAsMap;
}
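
The map-conversion helper that SearchHit used to borrow from SourceLookup now lives on Source itself: a bytes reference is wrapped and parsed on demand. A small sketch, using BytesArray only to fabricate an example payload (decimal numbers are still parsed as doubles, as before):

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.search.lookup.Source;

import java.util.Map;

// sketch: parse a raw _source reference into a map
static Map<String, Object> toMap(BytesReference sourceBytes) {
    return Source.fromBytes(sourceBytes).source();
}

// e.g. toMap(new BytesArray("{\"title\":\"hello\",\"n\":1234.567}"))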

View file

@ -15,9 +15,11 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
@ -43,7 +45,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.Bucket
import org.elasticsearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.SamplingContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.search.profile.Timer;
import java.io.IOException;
@ -224,7 +228,7 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
.toArray(String[]::new);
if (context.profiling()) {
return new ProfilingSignificantTextCollectorSource(
context.lookup().source(),
context.lookup(),
context.bigArrays(),
fieldType,
analyzer,
@ -233,7 +237,7 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
);
}
return new SignificantTextCollectorSource(
context.lookup().source(),
context.lookup(),
context.bigArrays(),
fieldType,
analyzer,
@ -243,7 +247,8 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
}
private static class SignificantTextCollectorSource implements MapStringTermsAggregator.CollectorSource {
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private final SourceFilter sourceFilter;
private final BigArrays bigArrays;
private final MappedFieldType fieldType;
private final Analyzer analyzer;
@ -252,18 +257,19 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
private ObjectArray<DuplicateByteSequenceSpotter> dupSequenceSpotters;
SignificantTextCollectorSource(
SourceLookup sourceLookup,
SourceProvider sourceProvider,
BigArrays bigArrays,
MappedFieldType fieldType,
Analyzer analyzer,
String[] sourceFieldNames,
boolean filterDuplicateText
) {
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
this.bigArrays = bigArrays;
this.fieldType = fieldType;
this.analyzer = analyzer;
this.sourceFieldNames = sourceFieldNames;
this.sourceFilter = new SourceFilter(sourceFieldNames, Strings.EMPTY_ARRAY);
dupSequenceSpotters = filterDuplicateText ? bigArrays.newObjectArray(1) : null;
}
@ -306,12 +312,11 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
}
private void collectFromSource(int doc, long owningBucketOrd, DuplicateByteSequenceSpotter spotter) throws IOException {
sourceLookup.setSegmentAndDocument(ctx, doc);
BytesRefHash inDocTerms = new BytesRefHash(256, bigArrays);
Source source = sourceProvider.getSource(ctx, doc).filter(sourceFilter);
try {
for (String sourceField : sourceFieldNames) {
Iterator<String> itr = extractRawValues(sourceField).stream().map(obj -> {
Iterator<String> itr = extractRawValues(source, sourceField).stream().map(obj -> {
if (obj == null) {
return null;
}
@ -400,8 +405,8 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
/**
* Extract values from {@code _source}.
*/
protected List<Object> extractRawValues(String field) {
return sourceLookup.extractRawValuesWithoutCaching(field);
protected List<Object> extractRawValues(Source source, String field) {
return XContentMapValues.extractRawValues(field, source.source());
}
@Override
@ -417,14 +422,14 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
private long charsFetched;
private ProfilingSignificantTextCollectorSource(
SourceLookup sourceLookup,
SourceProvider sourceProvider,
BigArrays bigArrays,
MappedFieldType fieldType,
Analyzer analyzer,
String[] sourceFieldNames,
boolean filterDuplicateText
) {
super(sourceLookup, bigArrays, fieldType, analyzer, sourceFieldNames, filterDuplicateText);
super(sourceProvider, bigArrays, fieldType, analyzer, sourceFieldNames, filterDuplicateText);
}
@Override
@ -464,10 +469,10 @@ public class SignificantTextAggregatorFactory extends AggregatorFactory {
}
@Override
protected List<Object> extractRawValues(String field) {
protected List<Object> extractRawValues(Source source, String field) {
extract.start();
try {
return super.extractRawValues(field);
return super.extractRawValues(source, field);
} finally {
extract.stop();
}
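
Filtering down to the requested source fields is now a property of the Source being read rather than logic buried in SourceLookup. A sketch of the pattern the collector above uses, with hypothetical field names:

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.lookup.SourceProvider;

import java.io.IOException;
import java.util.List;

// sketch: load a document's source, keep only the analysed fields, then pull the raw values
static List<Object> titleValues(SourceProvider provider, LeafReaderContext ctx, int doc) throws IOException {
    SourceFilter filter = new SourceFilter(new String[] { "title", "body" }, Strings.EMPTY_ARRAY);
    Source filtered = provider.getSource(ctx, doc).filter(filter);
    return XContentMapValues.extractRawValues("title", filtered.source());
}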

View file

@ -26,7 +26,7 @@ import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.search.profile.ProfileResult;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.tasks.TaskCancelledException;
@ -84,6 +84,16 @@ public class FetchPhase {
}
}
private static class PreloadedSourceProvider implements SourceProvider {
Source source;
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
return source;
}
}
private SearchHits buildSearchHits(SearchContext context, Profiler profiler) {
FetchContext fetchContext = new FetchContext(context);
@ -102,6 +112,9 @@ public class FetchPhase {
NestedDocuments nestedDocuments = context.getSearchExecutionContext().getNestedDocuments();
PreloadedSourceProvider sourceProvider = new PreloadedSourceProvider();
context.getSearchExecutionContext().setSourceProvider(sourceProvider);
FetchPhaseDocsIterator docsIterator = new FetchPhaseDocsIterator() {
LeafReaderContext ctx;
@ -137,6 +150,7 @@ public class FetchPhase {
ctx,
leafSourceLoader
);
sourceProvider.source = hit.source();
for (FetchSubPhaseProcessor processor : processors) {
processor.process(hit);
}
@ -187,15 +201,7 @@ public class FetchPhase {
SourceLoader.Leaf sourceLoader
) throws IOException {
if (nestedDocuments.advance(docId - subReaderContext.docBase) == null) {
return prepareNonNestedHitContext(
context,
requiresSource,
profiler,
leafStoredFieldLoader,
docId,
subReaderContext,
sourceLoader
);
return prepareNonNestedHitContext(requiresSource, profiler, leafStoredFieldLoader, docId, subReaderContext, sourceLoader);
} else {
return prepareNestedHitContext(
context,
@ -217,7 +223,6 @@ public class FetchPhase {
* allows fetch subphases that use the hit context to access the preloaded source.
*/
private static HitContext prepareNonNestedHitContext(
SearchContext context,
boolean requiresSource,
Profiler profiler,
LeafStoredFieldLoader leafStoredFieldLoader,
@ -240,9 +245,6 @@ public class FetchPhase {
try {
profiler.startLoadingSource();
source = sourceLoader.source(leafStoredFieldLoader, subDocId);
SourceLookup scriptSourceLookup = context.getSearchExecutionContext().lookup().source();
scriptSourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
scriptSourceLookup.setSourceProvider(new SourceLookup.BytesSourceProvider(source.internalSourceRef()));
} finally {
profiler.stopLoadingSource();
}
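
Because SourceProvider has a single abstract method, it can also be supplied as a lambda, which is how the fetch phase above and many tests in this change wire in an already-loaded source. A sketch of the same idea outside the fetch phase, with a hard-coded payload standing in for the real source:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;

// sketch: a provider that ignores segment and doc and always returns the same source
SourceProvider fixed = (ctx, doc) -> Source.fromBytes(new BytesArray("{\"field\":\"value\"}"));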

View file

@ -10,35 +10,44 @@ package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Map;
import java.util.function.Supplier;
/**
* Per-segment version of {@link SearchLookup}.
*/
public class LeafSearchLookup {
private final LeafReaderContext ctx;
private int doc;
private final LeafDocLookup docMap;
private final SourceLookup sourceLookup;
private final LeafStoredFieldsLookup fieldsLookup;
private final Map<String, Object> asMap;
private final Supplier<Source> source;
public LeafSearchLookup(LeafReaderContext ctx, LeafDocLookup docMap, SourceLookup sourceLookup, LeafStoredFieldsLookup fieldsLookup) {
this.ctx = ctx;
public LeafSearchLookup(
LeafReaderContext ctx,
LeafDocLookup docMap,
SourceProvider sourceProvider,
LeafStoredFieldsLookup fieldsLookup
) {
this.docMap = docMap;
this.sourceLookup = sourceLookup;
this.fieldsLookup = fieldsLookup;
this.asMap = Map.of("doc", docMap, "_doc", docMap, "_source", sourceLookup, "_fields", fieldsLookup);
this.source = () -> {
try {
return sourceProvider.getSource(ctx, doc);
} catch (IOException e) {
throw new UncheckedIOException("Couldn't load source", e);
}
};
this.asMap = Map.of("doc", docMap, "_doc", docMap, "_source", source, "_fields", fieldsLookup);
}
public Map<String, Object> asMap() {
return this.asMap;
}
public SourceLookup source() {
return this.sourceLookup;
}
public LeafStoredFieldsLookup fields() {
return this.fieldsLookup;
}
@ -47,9 +56,13 @@ public class LeafSearchLookup {
return this.docMap;
}
public Supplier<Source> source() {
return this.source;
}
public void setDocument(int docId) {
this.doc = docId;
docMap.setDocument(docId);
sourceLookup.setSegmentAndDocument(ctx, docId);
fieldsLookup.setDocument(docId);
}
}

View file

@ -13,6 +13,7 @@ import org.elasticsearch.common.TriFunction;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Objects;
@ -20,7 +21,7 @@ import java.util.Set;
import java.util.function.Function;
import java.util.function.Supplier;
public class SearchLookup {
public class SearchLookup implements SourceProvider {
/**
* The maximum depth of field dependencies.
* When a runtime field's doc values depends on another runtime field's doc values,
@ -39,7 +40,7 @@ public class SearchLookup {
* {@code b}'s chain will contain {@code ["a", "b"]}.
*/
private final Set<String> fieldChain;
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private final Function<String, MappedFieldType> fieldTypeLookup;
private final TriFunction<
MappedFieldType,
@ -54,11 +55,11 @@ public class SearchLookup {
public SearchLookup(
Function<String, MappedFieldType> fieldTypeLookup,
TriFunction<MappedFieldType, Supplier<SearchLookup>, MappedFieldType.FielddataOperation, IndexFieldData<?>> fieldDataLookup,
SourceLookup.SourceProvider sourceProvider
SourceProvider sourceProvider
) {
this.fieldTypeLookup = fieldTypeLookup;
this.fieldChain = Collections.emptySet();
this.sourceLookup = new SourceLookup(sourceProvider);
this.sourceProvider = sourceProvider;
this.fieldDataLookup = fieldDataLookup;
}
@ -71,7 +72,7 @@ public class SearchLookup {
*/
private SearchLookup(SearchLookup searchLookup, Set<String> fieldChain) {
this.fieldChain = Collections.unmodifiableSet(fieldChain);
this.sourceLookup = searchLookup.sourceLookup;
this.sourceProvider = searchLookup.sourceProvider;
this.fieldTypeLookup = searchLookup.fieldTypeLookup;
this.fieldDataLookup = searchLookup.fieldDataLookup;
}
@ -101,7 +102,7 @@ public class SearchLookup {
return new LeafSearchLookup(
context,
new LeafDocLookup(fieldTypeLookup, this::getForField, context),
sourceLookup,
sourceProvider,
new LeafStoredFieldsLookup(fieldTypeLookup, (doc, visitor) -> context.reader().document(doc, visitor))
);
}
@ -114,7 +115,8 @@ public class SearchLookup {
return fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name()), options);
}
public SourceLookup source() {
return sourceLookup;
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
return sourceProvider.getSource(ctx, doc);
}
}
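
SearchLookup now both consumes a SourceProvider (for its leaf lookups) and is one itself, so code holding a SearchLookup can ask it for a per-document Source directly. A minimal construction sketch; the field type and field data lookups are stubbed out here and would be real lookups in production code:

import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;

import java.io.IOException;
import java.util.Map;

// sketch: build a lookup that reads _source from stored fields, then fetch one document's source
static Map<String, Object> sourceAsMap(LeafReaderContext leaf, int doc) throws IOException {
    SearchLookup lookup = new SearchLookup(
        field -> null,                     // stub MappedFieldType lookup
        (ft, l, fdo) -> null,              // stub field data lookup
        SourceProvider.fromStoredFields()
    );
    Source source = lookup.getSource(leaf, doc);   // SearchLookup delegates to its SourceProvider
    return source.source();
}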

View file

@ -1,426 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.core.MemoizedSupplier;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
public class SourceLookup implements Source, Map<String, Object> {
private SourceProvider sourceProvider;
private int docId = -1;
public SourceLookup(SourceProvider sourceProvider) {
this.sourceProvider = sourceProvider;
}
@Override
public XContentType sourceContentType() {
return sourceProvider.sourceContentType();
}
public int docId() {
return docId;
}
@Override
public Map<String, Object> source() {
return sourceProvider.source();
}
public void setSourceProvider(SourceProvider sourceProvider) {
this.sourceProvider = sourceProvider;
}
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
sourceProvider.setSegmentAndDocument(context, docId);
this.docId = docId;
}
/**
* Internal source representation, might be compressed....
*/
@Override
public BytesReference internalSourceRef() {
return sourceProvider.sourceAsBytes();
}
@Override
public Source filter(SourceFilter sourceFilter) {
return sourceFilter.filterMap(this);
}
/**
* Checks if the source has been deserialized as a {@link Map} of java objects.
*/
public boolean hasSourceAsMap() {
return sourceProvider.hasSourceAsMap();
}
/**
* Returns the values associated with the path. Those are "low" level values, and it can
* handle path expression where an array/list is navigated within.
*
* This method will:
*
* - not cache the source if it is not already parsed
* - only extract the desired values from the compressed source instead of deserializing the whole object
*
* This is useful when the caller only wants a single value from the source and does not care whether the source is fully
* parsed and cached for later use.
* @param path The path from which to extract the values from source
* @return The list of found values or an empty list if none are found
*/
public List<Object> extractRawValuesWithoutCaching(String path) {
return sourceProvider.extractRawValuesWithoutCaching(path);
}
private static Tuple<XContentType, Map<String, Object>> sourceAsMapAndType(BytesReference source) throws ElasticsearchParseException {
return XContentHelper.convertToMap(source, false);
}
/**
* Get the source as a {@link Map} of java objects.
* <p>
* Important: This can lose precision on numbers with a decimal point. It
* converts numbers like {@code "n": 1234.567} to a {@code double} which
* only has 52 bits of precision in the mantissa. This will come up most
* frequently when folks write nanosecond precision dates as a decimal
* number.
*/
public static Map<String, Object> sourceAsMap(BytesReference source) throws ElasticsearchParseException {
return sourceAsMapAndType(source).v2();
}
@Override
public Object get(Object key) {
return source().get(key);
}
@Override
public int size() {
return source().size();
}
@Override
public boolean isEmpty() {
return source().isEmpty();
}
@Override
public boolean containsKey(Object key) {
return source().containsKey(key);
}
@Override
public boolean containsValue(Object value) {
return source().containsValue(value);
}
@Override
public Set<String> keySet() {
return source().keySet();
}
@Override
public Collection<Object> values() {
return source().values();
}
@Override
public Set<Map.Entry<String, Object>> entrySet() {
return source().entrySet();
}
@Override
public Object put(String key, Object value) {
throw new UnsupportedOperationException();
}
@Override
public Object remove(Object key) {
throw new UnsupportedOperationException();
}
@Override
@SuppressWarnings("rawtypes")
public void putAll(Map m) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
throw new UnsupportedOperationException();
}
/**
* SourceProvider describes how SourceLookup will access the source.
*/
public interface SourceProvider {
Map<String, Object> source();
XContentType sourceContentType();
BytesReference sourceAsBytes();
List<Object> extractRawValuesWithoutCaching(String path);
boolean hasSourceAsMap();
void setSegmentAndDocument(LeafReaderContext context, int docId);
}
/**
* Null source provider when we expect to be given a real source provider in the future.
* Exceptions are expected to be seen primarily by scripts attempting to access synthetic source.
*/
public static class NullSourceProvider implements SourceProvider {
@Override
public Map<String, Object> source() {
throw new IllegalArgumentException("no source available");
}
@Override
public XContentType sourceContentType() {
throw new IllegalArgumentException("no source available");
}
@Override
public BytesReference sourceAsBytes() {
throw new IllegalArgumentException("no source available");
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
throw new IllegalArgumentException("no source available");
}
@Override
public boolean hasSourceAsMap() {
return false;
}
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
// nothing to do
}
}
/**
* Provider for source using a given map and optional content type.
*/
private static class MapSourceProvider implements SourceProvider {
private final Map<String, Object> source;
private final XContentType sourceContentType;
private MapSourceProvider(Map<String, Object> source) {
this.source = source;
this.sourceContentType = XContentType.JSON;
}
@Override
public Map<String, Object> source() {
return source;
}
@Override
public XContentType sourceContentType() {
return sourceContentType;
}
@Override
public BytesReference sourceAsBytes() {
throw new UnsupportedOperationException("source as bytes unavailable - empty or already parsed to map");
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
return XContentMapValues.extractRawValues(path, source);
}
@Override
public boolean hasSourceAsMap() {
return true;
}
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
//
}
}
/**
* Provider for source using given source bytes.
*/
public static class BytesSourceProvider implements SourceProvider {
private BytesReference sourceAsBytes;
private Map<String, Object> source;
private XContentType sourceContentType;
public BytesSourceProvider(BytesReference sourceAsBytes) {
this.sourceAsBytes = sourceAsBytes;
}
void parseSource() {
Tuple<XContentType, Map<String, Object>> tuple = sourceAsMapAndType(sourceAsBytes);
this.source = tuple.v2();
this.sourceContentType = tuple.v1();
}
@Override
public Map<String, Object> source() {
if (source == null) {
parseSource();
}
return source;
}
@Override
public XContentType sourceContentType() {
if (source == null) {
parseSource();
}
return sourceContentType;
}
@Override
public BytesReference sourceAsBytes() {
return sourceAsBytes;
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
if (source != null) {
return XContentMapValues.extractRawValues(path, source);
}
return XContentMapValues.extractRawValues(
path,
XContentHelper.convertToMap(sourceAsBytes, false, null, Set.of(path), null).v2()
);
}
@Override
public boolean hasSourceAsMap() {
return source != null;
}
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
//
}
}
/**
* Provider for source using a fields visitor.
*/
public static class ReaderSourceProvider implements SourceProvider {
private LeafReader reader;
private CheckedBiConsumer<Integer, FieldsVisitor, IOException> fieldReader;
private int docId = -1;
private SourceProvider sourceProvider;
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
// if we are called with the same document, don't invalidate source
if (this.reader == context.reader() && this.docId == docId) {
return;
}
// only reset reader and fieldReader when reader changes
if (this.reader != context.reader()) {
this.reader = context.reader();
// All the docs to fetch are adjacent but Lucene stored fields are optimized
// for random access and don't optimize for sequential access - except for merging.
// So we do a little hack here and pretend we're going to do merges in order to
// get better sequential access.
if (context.reader() instanceof SequentialStoredFieldsLeafReader lf) {
// Avoid eagerly loading the stored fields reader, since this can be expensive
Supplier<StoredFieldsReader> supplier = new MemoizedSupplier<>(lf::getSequentialStoredFieldsReader);
fieldReader = (d, v) -> supplier.get().visitDocument(d, v);
} else {
fieldReader = context.reader()::document;
}
}
this.sourceProvider = null;
this.docId = docId;
}
@Override
public Map<String, Object> source() {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.source();
}
private void readSourceBytes() {
try {
FieldsVisitor sourceFieldVisitor = new FieldsVisitor(true);
fieldReader.accept(docId, sourceFieldVisitor);
BytesReference sourceAsBytes = sourceFieldVisitor.source();
if (sourceAsBytes == null) {
sourceProvider = new MapSourceProvider(Collections.emptyMap());
} else {
sourceProvider = new BytesSourceProvider(sourceAsBytes);
((BytesSourceProvider) sourceProvider).parseSource();
}
} catch (Exception e) {
throw new ElasticsearchParseException("failed to parse / load source", e);
}
}
@Override
public XContentType sourceContentType() {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.sourceContentType();
}
@Override
public BytesReference sourceAsBytes() {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.sourceAsBytes();
}
@Override
public List<Object> extractRawValuesWithoutCaching(String path) {
if (sourceProvider == null) {
readSourceBytes();
}
return sourceProvider.extractRawValuesWithoutCaching(path);
}
@Override
public boolean hasSourceAsMap() {
return sourceProvider != null && sourceProvider.hasSourceAsMap();
}
}
}

View file

@ -0,0 +1,58 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import java.io.IOException;
/**
* Provides access to the Source of a document
*/
public interface SourceProvider {
/**
* Get the Source for the given doc within the given context
*/
Source getSource(LeafReaderContext ctx, int doc) throws IOException;
/**
* A SourceProvider that loads source from stored fields
*/
static SourceProvider fromStoredFields() {
StoredFieldLoader storedFieldLoader = StoredFieldLoader.sequentialSource();
return new SourceProvider() {
// TODO we can make this segment thread safe by keeping an array of LeafStoredFieldLoader/doc/Source
// records, indexed by the ordinal of the LeafReaderContext
LeafReaderContext ctx;
int doc;
LeafStoredFieldLoader leafStoredFieldLoader;
Source source;
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
if (this.ctx == ctx) {
if (this.doc == doc) {
return source;
}
} else {
leafStoredFieldLoader = storedFieldLoader.getLoader(ctx, null);
}
this.ctx = ctx;
this.doc = doc;
leafStoredFieldLoader.advanceTo(doc);
return source = Source.fromBytes(leafStoredFieldLoader.source());
}
};
}
}
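
A usage sketch for the stored-fields provider above: it caches the last leaf and document it saw, so it is meant to be driven sequentially within a single thread. Reader construction and the field path are placeholders here:

import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;

import java.io.IOException;

// sketch: print the raw values of one source path for the first document in each segment
static void dumpFirstDocs(DirectoryReader reader, String path) throws IOException {
    SourceProvider provider = SourceProvider.fromStoredFields();
    for (LeafReaderContext leaf : reader.leaves()) {
        if (leaf.reader().numDocs() == 0) {
            continue;                                    // skip empty segments
        }
        Source source = provider.getSource(leaf, 0);     // loads _source from stored fields for doc 0
        System.out.println(XContentMapValues.extractRawValues(path, source.source()));
    }
}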

View file

@ -40,7 +40,6 @@ import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptCompiler;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
@ -139,7 +138,7 @@ public class IndexFieldDataServiceTests extends ESSingleNodeTestCase {
searchLookupSetOnce.set(fdc.lookupSupplier());
return (IndexFieldData.Builder) (cache, breakerService) -> null;
});
SearchLookup searchLookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
SearchLookup searchLookup = new SearchLookup(null, null, (ctx, doc) -> null);
ifdService.getForField(ft, new FieldDataContext("qualified", () -> searchLookup, null, MappedFieldType.FielddataOperation.SEARCH));
assertSame(searchLookup, searchLookupSetOnce.get().get());
}

View file

@ -8,9 +8,7 @@
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fielddata.SourceValueFetcherSortedBooleanIndexFieldData.SourceValueFetcherSortedBooleanDocValues;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -29,12 +27,7 @@ public class SourceValueFetcherIndexFieldDataTests extends ESTestCase {
SourceValueFetcherSortedBooleanDocValues values = new SourceValueFetcherSortedBooleanDocValues(
null,
(source, doc, ignoredValues) -> docs.get(doc),
new SourceLookup(null) {
@Override
public void setSegmentAndDocument(LeafReaderContext context, int docId) {
// do nothing
}
}
(ctx, doc) -> null
);
for (int doc = 0; doc < docs.size(); ++doc) {

View file

@ -11,6 +11,7 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
@ -36,7 +37,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
@ -258,13 +259,13 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC
}
protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries, MappedFieldType mappedFieldType) {
return mockContext(allowExpensiveQueries, mappedFieldType, new SourceLookup.ReaderSourceProvider());
return mockContext(allowExpensiveQueries, mappedFieldType, SourceProvider.fromStoredFields());
}
protected static SearchExecutionContext mockContext(
boolean allowExpensiveQueries,
MappedFieldType mappedFieldType,
SourceLookup.SourceProvider sourceProvider
SourceProvider sourceProvider
) {
SearchExecutionContext context = mock(SearchExecutionContext.class);
if (mappedFieldType != null) {
@ -453,7 +454,7 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC
/**
* We need to make sure we don't randomize the useThreads parameter for subtests of this abstract test case
* because scripted fields use {@link SourceLookup} which isn't thread-safe.
* because scripted fields use {@link SearchLookup#getSource(LeafReaderContext, int)} which isn't thread-safe.
* Also Elasticsearch doesn't support concurrent searches so far, so we don't need to test it.
*/
protected IndexSearcher newUnthreadedSearcher(IndexReader reader) {

View file

@ -17,7 +17,6 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.BooleanFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.List;
@ -59,7 +58,7 @@ public class BooleanFieldScriptTests extends FieldScriptTestCase<BooleanFieldScr
BooleanFieldScript script = new BooleanFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {

View file

@ -427,8 +427,10 @@ public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeT
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit((Boolean) foo);
}
}
@ -441,8 +443,10 @@ public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeT
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit((Boolean) foo ^ ((Boolean) getParams().get("param")));
}
}

View file

@ -16,7 +16,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentType;
@ -345,7 +345,7 @@ public class CompositeRuntimeFieldTests extends MapperServiceTestCase {
(mft, lookupSupplier, fdo) -> mft.fielddataBuilder(
new FieldDataContext("test", lookupSupplier, mapperService.mappingLookup()::sourcePaths, fdo)
).build(null, null),
new SourceLookup.ReaderSourceProvider()
SourceProvider.fromStoredFields()
);
LeafSearchLookup leafSearchLookup = searchLookup.getLeafSearchLookup(reader.leaves().get(0));

View file

@ -19,7 +19,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.DateFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@ -68,7 +68,7 @@ public class DateFieldScriptTests extends FieldScriptTestCase<DateFieldScript.Fa
DateFieldScript script = new DateFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
DateFormatter.forPattern(randomDateFormatterPattern()).withLocale(randomLocale(random())),
OnScriptError.FAIL,
reader.leaves().get(0)
@ -105,7 +105,7 @@ public class DateFieldScriptTests extends FieldScriptTestCase<DateFieldScript.Fa
DateFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
DateFormatter.forPattern("epoch_millis"),
OnScriptError.FAIL
);

View file

@ -499,8 +499,10 @@ public class DateScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object timestamp : (List<?>) lookup.source().source().get("timestamp")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object timestamp : (List<?>) source.get("timestamp")) {
Parse parse = new Parse(this);
emit(parse.parse(timestamp));
}
@ -515,8 +517,10 @@ public class DateScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object timestamp : (List<?>) lookup.source().source().get("timestamp")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object timestamp : (List<?>) source.get("timestamp")) {
long epoch = (Long) timestamp;
ZonedDateTime dt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(epoch), ZoneId.of("UTC"));
dt = dt.plus(((Number) params.get("days")).longValue(), ChronoUnit.DAYS);

View file

@ -18,7 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.DoubleFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@ -66,7 +66,7 @@ public class DoubleFieldScriptTests extends FieldScriptTestCase<DoubleFieldScrip
DoubleFieldScript script = new DoubleFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -102,7 +102,7 @@ public class DoubleFieldScriptTests extends FieldScriptTestCase<DoubleFieldScrip
DoubleFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
OnScriptError.FAIL
);
DoubleFieldScript doubleFieldScript = leafFactory.newInstance(reader.leaves().get(0));

View file

@ -254,8 +254,10 @@ public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTe
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(((Number) foo).doubleValue());
}
}
@ -268,8 +270,10 @@ public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTe
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(((Number) foo).doubleValue() + ((Number) getParams().get("param")).doubleValue());
}
}

View file

@ -17,7 +17,6 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.List;
@ -61,7 +60,7 @@ public class GeoPointFieldScriptTests extends FieldScriptTestCase<GeoPointFieldS
GeoPointFieldScript script = new GeoPointFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {

View file

@ -35,6 +35,7 @@ import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.Source;
import java.io.IOException;
import java.util.ArrayList;
@ -109,16 +110,16 @@ public class GeoPointScriptFieldTypeTests extends AbstractNonTextScriptFieldType
{"foo": {"lat": 45.0, "lon" : 45.0}}"""))));
try (DirectoryReader reader = iw.getReader()) {
SearchExecutionContext searchContext = mockContext(true, simpleMappedFieldType());
searchContext.lookup().source().setSegmentAndDocument(reader.leaves().get(0), 0);
Source source = searchContext.lookup().getSource(reader.leaves().get(0), 0);
ValueFetcher fetcher = simpleMappedFieldType().valueFetcher(searchContext, randomBoolean() ? null : "geojson");
fetcher.setNextReader(reader.leaves().get(0));
assertThat(
fetcher.fetchValues(searchContext.lookup().source(), 0, null),
fetcher.fetchValues(source, 0, null),
equalTo(List.of(Map.of("type", "Point", "coordinates", List.of(45.0, 45.0))))
);
fetcher = simpleMappedFieldType().valueFetcher(searchContext, "wkt");
fetcher.setNextReader(reader.leaves().get(0));
assertThat(fetcher.fetchValues(searchContext.lookup().source(), 0, null), equalTo(List.of("POINT (45.0 45.0)")));
assertThat(fetcher.fetchValues(source, 0, null), equalTo(List.of("POINT (45.0 45.0)")));
}
}
}
@ -241,8 +242,10 @@ public class GeoPointScriptFieldTypeTests extends AbstractNonTextScriptFieldType
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
Map<?, ?> foo = (Map<?, ?>) lookup.source().source().get("foo");
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
Map<?, ?> foo = (Map<?, ?>) source.get("foo");
emit(((Number) foo.get("lat")).doubleValue(), ((Number) foo.get("lon")).doubleValue());
}
};

View file

@ -14,7 +14,8 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
@ -64,20 +65,15 @@ public class IgnoredFieldMapperTests extends MetadataMapperTestCase {
mapperService,
iw -> { iw.addDocument(mapperService.documentMapper().parse(source(b -> b.field("field", "value"))).rootDoc()); },
iw -> {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
);
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService), (ctx, doc) -> null);
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup);
IgnoredFieldMapper.IgnoredFieldType ft = (IgnoredFieldMapper.IgnoredFieldType) mapperService.fieldType("_ignored");
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
valueFetcher.setNextReader(context);
assertEquals(List.of("field"), valueFetcher.fetchValues(lookup.source(), 0, new ArrayList<>()));
assertEquals(List.of("field"), valueFetcher.fetchValues(Source.empty(XContentType.JSON), 0, new ArrayList<>()));
}
);
}

View file

@ -12,7 +12,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.Collections;
@ -60,18 +59,13 @@ public class IndexFieldMapperTests extends MetadataMapperTestCase {
iw.addDocument(mapperService.documentMapper().parse(source).rootDoc());
}, iw -> {
IndexFieldMapper.IndexFieldType ft = (IndexFieldMapper.IndexFieldType) mapperService.fieldType("_index");
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
);
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService), (ctx, doc) -> null);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
valueFetcher.setNextReader(context);
assertEquals(List.of(index), valueFetcher.fetchValues(lookup.source(), 0, Collections.emptyList()));
assertEquals(List.of(index), valueFetcher.fetchValues(lookup.getSource(context, 0), 0, Collections.emptyList()));
});
}

View file

@ -18,7 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.IpFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@ -67,7 +67,7 @@ public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factor
IpFieldScript script = new IpFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -103,7 +103,7 @@ public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factor
IpFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
OnScriptError.FAIL
);
IpFieldScript ipFieldScript = leafFactory.newInstance(reader.leaves().get(0));

View file

@ -268,8 +268,10 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase {
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(foo.toString());
}
}
@ -282,8 +284,10 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase {
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(foo.toString() + getParams().get("param"));
}
}

View file

@ -40,7 +40,6 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.ArrayList;
@ -293,23 +292,6 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase
}
}
public void testSyntheticSourceAccess() throws IOException {
try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) {
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [1]}"))));
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"foo\": [2]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newUnthreadedSearcher(reader);
KeywordScriptFieldType fieldType = build("append_param", Map.of("param", "-suffix"), OnScriptError.FAIL);
expectThrows(
IllegalArgumentException.class,
() -> {
searcher.count(fieldType.termQuery("1-suffix", mockContext(true, null, new SourceLookup.NullSourceProvider())));
}
);
}
}
}
@Override
protected Query randomTermQuery(MappedFieldType ft, SearchExecutionContext ctx) {
return ft.termQuery(randomAlphaOfLengthBetween(1, 1000), ctx);
@ -413,8 +395,10 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(foo.toString());
}
}
@ -427,8 +411,10 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(foo.toString() + getParams().get("param").toString());
}
}

View file

@ -18,7 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.LongFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@ -66,7 +66,7 @@ public class LongFieldScriptTests extends FieldScriptTestCase<LongFieldScript.Fa
LongFieldScript script = new LongFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -102,7 +102,7 @@ public class LongFieldScriptTests extends FieldScriptTestCase<LongFieldScript.Fa
LongFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
OnScriptError.FAIL
);
LongFieldScript longFieldScript = leafFactory.newInstance(reader.leaves().get(0));

View file

@ -288,8 +288,10 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(((Number) foo).longValue());
}
}
@ -303,8 +305,10 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object foo : (List<?>) lookup.source().source().get("foo")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object foo : (List<?>) source.get("foo")) {
emit(((Number) foo).longValue() + ((Number) getParams().get("param")).longValue());
}
}
@ -320,8 +324,10 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object timestamp : (List<?>) lookup.source().source().get("timestamp")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object timestamp : (List<?>) source.get("timestamp")) {
emit(now - ((Number) timestamp).longValue());
}
}

View file

@ -18,7 +18,8 @@ import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.FieldFetcher;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import java.util.Collections;
@ -65,7 +66,7 @@ public class PlaceHolderFieldMapperTests extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
SourceProvider.fromStoredFields()
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
FieldFetcher fieldFetcher = FieldFetcher.create(
@ -74,8 +75,8 @@ public class PlaceHolderFieldMapperTests extends MapperServiceTestCase {
);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(lookup.source(), 0);
Source source = lookup.getSource(context, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(source, 0);
assertEquals(1, fields.size());
assertEquals(List.of("value"), fields.get("field").getValues());
});

View file

@ -16,7 +16,8 @@ import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.ArrayList;
@ -76,7 +77,7 @@ public class ProvidedIdFieldMapperTests extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
SourceProvider.fromStoredFields()
);
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup);
@ -84,9 +85,9 @@ public class ProvidedIdFieldMapperTests extends MapperServiceTestCase {
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
Source source = lookup.getSource(context, 0);
valueFetcher.setNextReader(context);
assertEquals(List.of(id), valueFetcher.fetchValues(lookup.source(), 0, new ArrayList<>()));
assertEquals(List.of(id), valueFetcher.fetchValues(source, 0, new ArrayList<>()));
}
);
}

View file

@ -13,7 +13,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
@ -74,20 +74,15 @@ public class RoutingFieldMapperTests extends MetadataMapperTestCase {
mapperService,
iw -> { iw.addDocument(mapperService.documentMapper().parse(source("1", b -> {}, "abcd")).rootDoc()); },
iw -> {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
);
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService), (ctx, doc) -> null);
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup);
RoutingFieldMapper.RoutingFieldType ft = (RoutingFieldMapper.RoutingFieldType) mapperService.fieldType("_routing");
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
valueFetcher.setNextReader(context);
assertEquals(List.of("abcd"), valueFetcher.fetchValues(lookup.source(), 0, new ArrayList<>()));
assertEquals(List.of("abcd"), valueFetcher.fetchValues(Source.empty(XContentType.JSON), 0, new ArrayList<>()));
}
);
}
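The lambda passed as the third constructor argument above suggests SourceProvider is a functional interface; a minimal sketch of the two shortcuts these metadata-field tests use, with names taken from the hunks themselves:

// Tests that never read _source can supply a no-op provider...
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService), (ctx, doc) -> null);
// ...and fetchers that ignore the document source can be handed an empty placeholder.
List<Object> values = valueFetcher.fetchValues(Source.empty(XContentType.JSON), 0, new ArrayList<>());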

View file

@ -18,7 +18,7 @@ import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.StringFieldScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@ -65,7 +65,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript script = new StringFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -92,7 +92,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript script = new StringFieldScript(
"test",
Map.of(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -133,7 +133,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
OnScriptError.FAIL
);
StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0));
@ -163,7 +163,7 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
OnScriptError.FAIL
);
StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0));

View file

@ -15,7 +15,8 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.Collections;
@ -64,18 +65,13 @@ public class VersionFieldMapperTests extends MetadataMapperTestCase {
iw.addDocument(parsedDoc.rootDoc());
}, iw -> {
VersionFieldMapper.VersionFieldType ft = (VersionFieldMapper.VersionFieldType) mapperService.fieldType("_version");
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
);
SearchLookup lookup = new SearchLookup(mapperService::fieldType, fieldDataLookup(mapperService), (ctx, doc) -> null);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
ValueFetcher valueFetcher = ft.valueFetcher(searchExecutionContext, null);
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
valueFetcher.setNextReader(context);
assertEquals(List.of(version), valueFetcher.fetchValues(lookup.source(), 0, Collections.emptyList()));
assertEquals(List.of(version), valueFetcher.fetchValues(Source.empty(XContentType.JSON), 0, Collections.emptyList()));
});
}

View file

@ -25,6 +25,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
@ -57,6 +58,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.RootObjectMapper;
import org.elasticsearch.index.mapper.RuntimeField;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TestRuntimeField;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.indices.IndicesModule;
@ -69,10 +71,13 @@ import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.LeafDocLookup;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;
import org.mockito.stubbing.Answer;
import java.io.IOException;
@ -390,6 +395,30 @@ public class SearchExecutionContextTests extends ESTestCase {
assertTrue(mappingLookup.isMultiField("cat.subfield"));
}
public void testSyntheticSourceScriptLoading() throws IOException {
// Build a mapping using synthetic source
SourceFieldMapper sourceMapper = new SourceFieldMapper.Builder().setSynthetic().build();
RootObjectMapper root = new RootObjectMapper.Builder("_doc", Explicit.IMPLICIT_TRUE).build(MapperBuilderContext.root(true));
Mapping mapping = new Mapping(root, new MetadataFieldMapper[] { sourceMapper }, Map.of());
MappingLookup lookup = MappingLookup.fromMapping(mapping);
SearchExecutionContext sec = createSearchExecutionContext("index", "", lookup, Map.of());
// Attempting to access synthetic source via this context should throw an error
SearchLookup searchLookup = sec.lookup();
Exception e = expectThrows(IllegalArgumentException.class, () -> searchLookup.getSource(null, 0));
assertThat(e.getMessage(), equalTo("Cannot access source from scripts in synthetic mode"));
// Setting the source provider explicitly then gives us a new SearchLookup that can use source
Source source = Source.fromMap(Map.of("field", "value"), XContentType.JSON);
sec.setSourceProvider((ctx, doc) -> source);
SearchLookup searchLookup1 = sec.lookup();
assertNotSame(searchLookup, searchLookup1);
assertSame(source, searchLookup1.getSource(null, 0));
}
public static SearchExecutionContext createSearchExecutionContext(String indexUuid, String clusterAlias) {
return createSearchExecutionContext(indexUuid, clusterAlias, MappingLookup.EMPTY, Map.of());
}
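A condensed sketch of the provider swap that testSyntheticSourceScriptLoading above exercises; setSourceProvider and Source.fromMap are taken from the test body, while the exception-on-access behaviour is what the test asserts for this particular synthetic-source context rather than a documented general guarantee.

// With synthetic source enabled, the default lookup refuses to load _source for scripts...
SearchLookup before = sec.lookup();
expectThrows(IllegalArgumentException.class, () -> before.getSource(null, 0));
// ...until an explicit provider is installed, which yields a fresh SearchLookup instance.
Source fixed = Source.fromMap(Map.of("field", "value"), XContentType.JSON);
sec.setSourceProvider((ctx, doc) -> fixed);
assertSame(fixed, sec.lookup().getSource(null, 0));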

View file

@ -15,7 +15,6 @@ import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.OnScriptError;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -33,7 +32,7 @@ public class CompositeFieldScriptTests extends ESTestCase {
CompositeFieldScript script = new CompositeFieldScript(
"composite",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -65,7 +64,7 @@ public class CompositeFieldScriptTests extends ESTestCase {
CompositeFieldScript script = new CompositeFieldScript(
"composite",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {
@ -79,7 +78,7 @@ public class CompositeFieldScriptTests extends ESTestCase {
StringFieldScript stringFieldScript = new StringFieldScript(
"composite.leaf",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, (ctx, doc) -> null),
OnScriptError.FAIL,
reader.leaves().get(0)
) {

View file

@ -18,7 +18,6 @@ import org.apache.lucene.tests.index.RandomIndexWriter;
import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.SortedNumericDocValuesLongFieldScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -40,7 +39,7 @@ public class SortedNumericDocValuesLongFieldScriptTests extends ESTestCase {
try (DirectoryReader reader = iw.getReader()) {
SortedNumericDocValuesLongFieldScript docValues = new SortedNumericDocValuesLongFieldScript(
"test",
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, (ctx, doc) -> null),
reader.leaves().get(0)
);
List<Long> values = new ArrayList<>();

View file

@ -19,7 +19,6 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.script.AbstractFieldScript;
import org.elasticsearch.script.SortedSetDocValuesStringFieldScript;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -41,7 +40,7 @@ public class SortedSetDocValuesStringFieldScriptTests extends ESTestCase {
try (DirectoryReader reader = iw.getReader()) {
SortedSetDocValuesStringFieldScript docValues = new SortedSetDocValuesStringFieldScript(
"test",
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, new SourceLookup.ReaderSourceProvider()),
new SearchLookup(field -> null, (ft, lookup, ftd) -> null, (ctx, doc) -> null),
reader.leaves().get(0)
);
List<String> values = new ArrayList<>();

View file

@ -36,7 +36,6 @@ import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.CardinalityUpperBound;
import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.runtime.StringScriptFieldTermQuery;
import java.io.IOException;
@ -599,7 +598,7 @@ public class RangeAggregatorTests extends AggregatorTestCase {
*/
public void testRuntimeFieldTopLevelQueryNotOptimized() throws IOException {
long totalDocs = (long) RangeAggregator.DOCS_PER_RANGE_TO_USE_FILTERS * 4;
SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null, new SourceLookup.ReaderSourceProvider());
SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null, (ctx, doc) -> null);
StringFieldScript.LeafFactory scriptFactory = ctx -> new StringFieldScript("dummy", Map.of(), lookup, OnScriptError.FAIL, ctx) {
@Override
public void execute() {

View file

@ -114,7 +114,6 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.runtime.StringScriptFieldTermQuery;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
@ -1995,7 +1994,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
*/
public void testRuntimeFieldTopLevelNotOptimized() throws IOException {
long totalDocs = 500;
SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null, new SourceLookup.ReaderSourceProvider());
SearchLookup lookup = new SearchLookup(s -> null, (ft, l, ftd) -> null, (ctx, doc) -> null);
StringFieldScript.LeafFactory scriptFactory = ctx -> new StringFieldScript("dummy", Map.of(), lookup, OnScriptError.FAIL, ctx) {
@Override
public void execute() {

View file

@ -19,7 +19,6 @@ import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -37,7 +36,7 @@ public class ScriptValuesTests extends ESTestCase {
int index;
FakeAggregationScript(Object[][] values) {
super(Collections.emptyMap(), new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider()) {
super(Collections.emptyMap(), new SearchLookup(null, null, (ctx, doc) -> null) {
@Override
public LeafSearchLookup getLeafSearchLookup(LeafReaderContext context) {

View file

@ -35,7 +35,7 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.elasticsearch.xcontent.XContentType;
@ -247,10 +247,9 @@ public class FieldFetcherTests extends MapperServiceTestCase {
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext);
SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(readerContext, 0);
Source s = SourceProvider.fromStoredFields().getSource(readerContext, 0);
Map<String, DocumentField> fetchedFields = fieldFetcher.fetch(sourceLookup, 0);
Map<String, DocumentField> fetchedFields = fieldFetcher.fetch(s, 0);
assertThat(fetchedFields.size(), equalTo(5));
assertEquals(docId, fetchedFields.get("_id").getValue());
assertEquals(routing, fetchedFields.get("_routing").getValue());
@ -1143,7 +1142,8 @@ public class FieldFetcherTests extends MapperServiceTestCase {
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext);
Map<String, DocumentField> fields = fieldFetcher.fetch(new SourceLookup(new SourceLookup.ReaderSourceProvider()), 0);
Source source = SourceProvider.fromStoredFields().getSource(readerContext, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(source, 0);
assertEquals(1, fields.size());
DocumentField field = fields.get("runtime_field");
assertEquals(1L, (long) field.getValue());
@ -1175,9 +1175,7 @@ public class FieldFetcherTests extends MapperServiceTestCase {
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext);
SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(readerContext, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(sourceLookup, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(Source.empty(XContentType.JSON), 0);
assertEquals(1, fields.size());
DocumentField field = fields.get("_id");
assertEquals("1", field.getValue());

View file

@ -1,77 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.codecs.StoredFieldsReader;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.instanceOf;
public class SourceLookupTests extends ESTestCase {
public void testSetSegmentAndDocument() throws IOException {
try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) {
Document doc = new Document();
doc.add(new StringField("field", "value", Field.Store.YES));
doc.add(new StoredField("_source", new BytesRef("{\"field\": \"value\"}")));
iw.addDocument(doc);
try (IndexReader reader = iw.getReader()) {
LeafReaderContext readerContext = reader.leaves().get(0);
SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(readerContext, 0);
sourceLookup.source();
assertNotNull(sourceLookup.internalSourceRef());
// Source should be preserved if we pass in the same reader and document
sourceLookup.setSegmentAndDocument(readerContext, 0);
assertNotNull(sourceLookup.internalSourceRef());
// Check that the stored fields reader is not loaded eagerly
LeafReader throwingReader = new SequentialStoredFieldsLeafReader(readerContext.reader()) {
@Override
protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) {
throw new UnsupportedOperationException("attempted to load stored fields reader");
}
@Override
public CacheHelper getReaderCacheHelper() {
return in.getReaderCacheHelper();
}
@Override
public CacheHelper getCoreCacheHelper() {
return in.getCoreCacheHelper();
}
};
sourceLookup.setSegmentAndDocument(throwingReader.getContext(), 0);
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, sourceLookup::source);
assertThat(e.getCause(), instanceOf(UnsupportedOperationException.class));
assertThat(e.getCause().getMessage(), containsString("attempted to load stored fields reader"));
}
}
}
}

View file

@ -0,0 +1,47 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
public class SourceProviderTests extends ESTestCase {
public void testStoredFieldsSourceProvider() throws IOException {
try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) {
Document doc = new Document();
doc.add(new StringField("field", "value", Field.Store.YES));
doc.add(new StoredField("_source", new BytesRef("{\"field\": \"value\"}")));
iw.addDocument(doc);
try (IndexReader reader = iw.getReader()) {
LeafReaderContext readerContext = reader.leaves().get(0);
SourceProvider sourceProvider = SourceProvider.fromStoredFields();
Source source = sourceProvider.getSource(readerContext, 0);
assertNotNull(source.internalSourceRef());
// Source should be preserved if we pass in the same reader and document
Source s2 = sourceProvider.getSource(readerContext, 0);
assertSame(s2, source);
}
}
}
}
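The new test above covers the stored-fields implementation; the rest of the commit also relies on plain (ctx, doc) lambdas acting as providers. A small usage sketch under that assumption, with illustrative variable names:

// A fixed-map provider can stand in for stored-fields loading in unit tests.
Source fixed = Source.fromMap(Map.of("field", "value"), XContentType.JSON);
SourceProvider provider = (ctx, doc) -> fixed;
Source loaded = provider.getSource(readerContext, 0);   // returns the same Source for every document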

View file

@ -28,7 +28,6 @@ import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
@ -50,7 +49,7 @@ public class ScriptScoreQueryTests extends ESTestCase {
private DirectoryReader reader;
private IndexSearcher searcher;
private LeafReaderContext leafReaderContext;
private final SearchLookup lookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
private final SearchLookup lookup = new SearchLookup(null, null, (ctx, doc) -> null);
@Before
public void initSearcher() throws IOException {
@ -179,7 +178,7 @@ public class ScriptScoreQueryTests extends ESTestCase {
}
@Override
public ScoreScript newInstance(DocReader docReader) throws IOException {
public ScoreScript newInstance(DocReader docReader) {
return new ScoreScript(script.getParams(), lookup, docReader) {
@Override
public double execute(ExplanationHolder explanation) {

View file

@ -26,7 +26,7 @@ import org.elasticsearch.script.AbstractLongFieldScript;
import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.List;
@ -84,7 +84,7 @@ public class GeoPointScriptFieldDistanceFeatureQueryTests extends AbstractScript
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"location\": [-3.56, -45.98]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
SearchLookup searchLookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
SearchLookup searchLookup = new SearchLookup(null, null, SourceProvider.fromStoredFields());
Function<LeafReaderContext, GeoPointFieldScript> leafFactory = ctx -> new GeoPointFieldScript(
"test",
Map.of(),
@ -93,8 +93,10 @@ public class GeoPointScriptFieldDistanceFeatureQueryTests extends AbstractScript
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
GeoPoint point = GeoUtils.parseGeoPoint(searchLookup.source().source().get("location"), true);
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
GeoPoint point = GeoUtils.parseGeoPoint(source.get("location"), true);
emit(point.lat(), point.lon());
}
};
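The rewritten script in this test no longer reaches through searchLookup.source(); it reads a pre-parsed map passed in via the script params. As far as the hunk shows, the "_source" params key is a convention of these tests rather than a general API, so treat this sketch accordingly:

// Inside the anonymous GeoPointFieldScript.execute() above.
@SuppressWarnings("unchecked")
Map<String, Object> source = (Map<String, Object>) getParams().get("_source");
GeoPoint point = GeoUtils.parseGeoPoint(source.get("location"), true);
emit(point.lat(), point.lon());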

View file

@ -22,7 +22,7 @@ import org.elasticsearch.script.AbstractLongFieldScript;
import org.elasticsearch.script.DateFieldScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -70,7 +70,7 @@ public class LongScriptFieldDistanceFeatureQueryTests extends AbstractScriptFiel
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
SearchLookup searchLookup = new SearchLookup(null, null, new SourceLookup.ReaderSourceProvider());
SearchLookup searchLookup = new SearchLookup(null, null, SourceProvider.fromStoredFields());
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory = ctx -> new DateFieldScript(
"test",
Map.of(),
@ -80,8 +80,10 @@ public class LongScriptFieldDistanceFeatureQueryTests extends AbstractScriptFiel
ctx
) {
@Override
@SuppressWarnings("unchecked")
public void execute() {
for (Object timestamp : (List<?>) searchLookup.source().source().get("timestamp")) {
Map<String, Object> source = (Map<String, Object>) this.getParams().get("_source");
for (Object timestamp : (List<?>) source.get("timestamp")) {
emit(((Number) timestamp).longValue());
}
}

View file

@ -11,7 +11,6 @@ import org.elasticsearch.Version;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xcontent.XContentType;
@ -38,9 +37,7 @@ public abstract class FieldTypeTestCase extends ESTestCase {
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.allowExpensiveQueries()).thenReturn(allowExpensiveQueries);
when(searchExecutionContext.isSourceEnabled()).thenReturn(true);
SourceLookup sourceLookup = mock(SourceLookup.class);
SearchLookup searchLookup = mock(SearchLookup.class);
when(searchLookup.source()).thenReturn(sourceLookup);
when(searchExecutionContext.lookup()).thenReturn(searchLookup);
when(searchExecutionContext.indexVersionCreated()).thenReturn(Version.CURRENT);
return searchExecutionContext;

View file

@ -49,7 +49,8 @@ import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.lookup.LeafStoredFieldsLookup;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.ToXContent;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.json.JsonXContent;
@ -509,14 +510,14 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
new SourceLookup.ReaderSourceProvider()
SourceProvider.fromStoredFields()
);
ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft, MappedFieldType.FielddataOperation.SEARCH));
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext context = searcher.getIndexReader().leaves().get(0);
lookup.source().setSegmentAndDocument(context, 0);
Source source = lookup.getSource(context, 0);
valueFetcher.setNextReader(context);
result.set(valueFetcher.fetchValues(lookup.source(), 0, new ArrayList<>()));
result.set(valueFetcher.fetchValues(source, 0, new ArrayList<>()));
}
);
return result.get();
@ -530,7 +531,10 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
iw -> {
IndexSearcher searcher = newSearcher(iw);
MappedFieldType ft = mapperService.fieldType("field");
SearchLookup searchLookup = new SearchLookup(null, null, mapperService.mappingLookup().getSourceProvider());
SourceProvider sourceProvider = mapperService.mappingLookup().isSourceSynthetic()
? (ctx, doc) -> { throw new IllegalArgumentException("Can't load source in scripts in synthetic mode"); }
: SourceProvider.fromStoredFields();
SearchLookup searchLookup = new SearchLookup(null, null, sourceProvider);
IndexFieldData<?> sfd = ft.fielddataBuilder(
new FieldDataContext("", () -> searchLookup, Set::of, MappedFieldType.FielddataOperation.SCRIPT)
).build(null, null);
@ -811,12 +815,11 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
SourceLookup sourceLookup = new SourceLookup(new SourceLookup.ReaderSourceProvider());
sourceLookup.setSegmentAndDocument(ir.leaves().get(0), 0);
Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0);
docValueFetcher.setNextReader(ir.leaves().get(0));
nativeFetcher.setNextReader(ir.leaves().get(0));
List<Object> fromDocValues = docValueFetcher.fetchValues(sourceLookup, 0, new ArrayList<>());
List<Object> fromNative = nativeFetcher.fetchValues(sourceLookup, 0, new ArrayList<>());
List<Object> fromDocValues = docValueFetcher.fetchValues(s, 0, new ArrayList<>());
List<Object> fromNative = nativeFetcher.fetchValues(s, 0, new ArrayList<>());
/*
* The native fetcher uses byte, short, etc but doc values always
* uses long or double. This difference is fine because on the outside
@ -923,7 +926,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
f -> fieldType,
(f, s, t) -> { throw new UnsupportedOperationException(); },
new SourceLookup.ReaderSourceProvider()
(ctx, docid) -> Source.fromBytes(doc.source())
);
withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), ir -> {

View file

@ -20,7 +20,8 @@ import org.elasticsearch.index.mapper.MapperServiceTestCase;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
@ -88,14 +89,14 @@ public class DataTierFieldTypeTests extends MapperServiceTestCase {
public void testFetchValue() throws IOException {
MappedFieldType ft = DataTierFieldMapper.DataTierFieldType.INSTANCE;
SourceLookup lookup = new SourceLookup(new SourceLookup.NullSourceProvider());
Source source = Source.empty(XContentType.JSON);
List<Object> ignoredValues = new ArrayList<>();
ValueFetcher valueFetcher = ft.valueFetcher(createContext(), null);
assertEquals(singletonList("data_warm"), valueFetcher.fetchValues(lookup, -1, ignoredValues));
assertEquals(singletonList("data_warm"), valueFetcher.fetchValues(source, -1, ignoredValues));
ValueFetcher emptyValueFetcher = ft.valueFetcher(createContextWithoutSetting(), null);
assertTrue(emptyValueFetcher.fetchValues(lookup, -1, ignoredValues).isEmpty());
assertTrue(emptyValueFetcher.fetchValues(source, -1, ignoredValues).isEmpty());
}
private SearchExecutionContext createContext() {

View file

@ -27,7 +27,6 @@ import org.elasticsearch.script.DocReader;
import org.elasticsearch.script.ScoreScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.AggregateDoubleMetricFieldType;
import org.elasticsearch.xpack.aggregatemetric.mapper.AggregateDoubleMetricFieldMapper.Metric;
@ -127,7 +126,7 @@ public class AggregateDoubleMetricFieldTypeTests extends FieldTypeTestCase {
(mft, lookupSupplier, fdo) -> mft.fielddataBuilder(
new FieldDataContext("test", lookupSupplier, searchExecutionContext::sourcePath, fdo)
).build(null, null),
new SourceLookup.ReaderSourceProvider()
(ctx, doc) -> null
);
when(searchExecutionContext.lookup()).thenReturn(lookup);
IndexSearcher searcher = newSearcher(reader);

View file

@ -16,7 +16,8 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.field.DocValuesScriptFieldFactory;
import org.elasticsearch.script.field.ToScriptFieldFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceProvider;
import java.io.IOException;
import java.util.ArrayList;
@ -40,10 +41,10 @@ public class SourceValueFetcherSortedUnsignedLongIndexFieldData extends SourceVa
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
@ -52,7 +53,7 @@ public class SourceValueFetcherSortedUnsignedLongIndexFieldData extends SourceVa
fieldName,
valuesSourceType,
valueFetcher,
sourceLookup,
sourceProvider,
toScriptFieldFactory
);
}
@ -62,15 +63,15 @@ public class SourceValueFetcherSortedUnsignedLongIndexFieldData extends SourceVa
String fieldName,
ValuesSourceType valuesSourceType,
ValueFetcher valueFetcher,
SourceLookup sourceLookup,
SourceProvider sourceProvider,
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory
) {
super(fieldName, valuesSourceType, valueFetcher, sourceLookup, toScriptFieldFactory);
super(fieldName, valuesSourceType, valueFetcher, sourceProvider, toScriptFieldFactory);
}
@Override
public SourceValueFetcherLeafFieldData<SortedNumericDocValues> loadDirect(LeafReaderContext context) {
return new SourceValueFetcherSortedUnsignedLongLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceLookup);
return new SourceValueFetcherSortedUnsignedLongLeafFieldData(toScriptFieldFactory, context, valueFetcher, sourceProvider);
}
private static class SourceValueFetcherSortedUnsignedLongLeafFieldData extends SourceValueFetcherLeafFieldData<SortedNumericDocValues> {
@ -79,15 +80,15 @@ public class SourceValueFetcherSortedUnsignedLongIndexFieldData extends SourceVa
ToScriptFieldFactory<SortedNumericDocValues> toScriptFieldFactory,
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceLookup);
super(toScriptFieldFactory, leafReaderContext, valueFetcher, sourceProvider);
}
@Override
public DocValuesScriptFieldFactory getScriptFieldFactory(String name) {
return toScriptFieldFactory.getScriptFieldFactory(
new SourceValueFetcherSortedUnsignedLongDocValues(leafReaderContext, valueFetcher, sourceLookup),
new SourceValueFetcherSortedUnsignedLongDocValues(leafReaderContext, valueFetcher, sourceProvider),
name
);
}
@ -98,7 +99,7 @@ public class SourceValueFetcherSortedUnsignedLongIndexFieldData extends SourceVa
private final LeafReaderContext leafReaderContext;
private final ValueFetcher valueFetcher;
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private List<Long> values;
private Iterator<Long> iterator;
@ -106,21 +107,20 @@ public class SourceValueFetcherSortedUnsignedLongIndexFieldData extends SourceVa
private SourceValueFetcherSortedUnsignedLongDocValues(
LeafReaderContext leafReaderContext,
ValueFetcher valueFetcher,
SourceLookup sourceLookup
SourceProvider sourceProvider
) {
this.leafReaderContext = leafReaderContext;
this.valueFetcher = valueFetcher;
this.sourceLookup = sourceLookup;
this.sourceProvider = sourceProvider;
values = new ArrayList<>();
}
@Override
public boolean advanceExact(int doc) throws IOException {
sourceLookup.setSegmentAndDocument(leafReaderContext, doc);
values.clear();
for (Object value : valueFetcher.fetchValues(sourceLookup, doc, Collections.emptyList())) {
Source source = sourceProvider.getSource(leafReaderContext, doc);
for (Object value : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
assert value instanceof Number;
values.add(((Number) value).longValue() ^ MASK_2_63);
}
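Condensing the field-data change above: the leaf doc-values implementation now asks its SourceProvider for the document's Source on every advanceExact call instead of repointing a shared SourceLookup. Names are taken from the hunk; the final return statement is an assumption, since the hunk cuts off inside the loop.

@Override
public boolean advanceExact(int doc) throws IOException {
    values.clear();
    Source source = sourceProvider.getSource(leafReaderContext, doc);   // per-document load
    for (Object value : valueFetcher.fetchValues(source, doc, Collections.emptyList())) {
        assert value instanceof Number;
        values.add(((Number) value).longValue() ^ MASK_2_63);           // unsigned-long encoding
    }
    return values.isEmpty() == false;                                   // assumed; not shown in the hunk
}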

View file

@ -322,7 +322,7 @@ public class UnsignedLongFieldMapper extends FieldMapper {
name(),
valuesSourceType,
sourceValueFetcher(sourcePaths),
searchLookup.source(),
searchLookup,
UnsignedLongDocValuesField::new
);
}

View file

@ -10,8 +10,10 @@ package org.elasticsearch.xpack.ml.aggs.categorization;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.util.BytesRefHash;
import org.elasticsearch.common.util.ObjectArray;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.core.Releasables;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.aggregations.AggregationExecutionContext;
@ -25,7 +27,9 @@ import org.elasticsearch.search.aggregations.bucket.DeferableBucketAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.LongKeyedBucketOrds;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.search.lookup.Source;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig;
import org.elasticsearch.xpack.ml.aggs.categorization.InternalCategorizationAggregation.Bucket;
import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer;
@ -40,7 +44,8 @@ import java.util.Optional;
public class CategorizeTextAggregator extends DeferableBucketAggregator {
private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
private final SourceLookup sourceLookup;
private final SourceProvider sourceProvider;
private final SourceFilter sourceFilter;
private final MappedFieldType fieldType;
private final CategorizationAnalyzer analyzer;
private final String sourceFieldName;
@ -63,8 +68,9 @@ public class CategorizeTextAggregator extends DeferableBucketAggregator {
Map<String, Object> metadata
) throws IOException {
super(name, factories, context, parent, metadata);
this.sourceLookup = context.lookup().source();
this.sourceProvider = context.lookup();
this.sourceFieldName = sourceFieldName;
this.sourceFilter = new SourceFilter(new String[] { sourceFieldName }, Strings.EMPTY_ARRAY);
this.fieldType = fieldType;
CategorizationAnalyzerConfig analyzerConfig = Optional.ofNullable(categorizationAnalyzerConfig)
.orElse(CategorizationAnalyzerConfig.buildStandardCategorizationAnalyzer(List.of()));
@ -156,8 +162,8 @@ public class CategorizeTextAggregator extends DeferableBucketAggregator {
}
private void collectFromSource(int doc, long owningBucketOrd, TokenListCategorizer categorizer) throws IOException {
sourceLookup.setSegmentAndDocument(aggCtx.getLeafReaderContext(), doc);
Iterator<String> itr = sourceLookup.extractRawValuesWithoutCaching(sourceFieldName).stream().map(obj -> {
Source source = sourceProvider.getSource(aggCtx.getLeafReaderContext(), doc).filter(sourceFilter);
Iterator<String> itr = XContentMapValues.extractRawValues(sourceFieldName, source.source()).stream().map(obj -> {
if (obj instanceof BytesRef) {
return fieldType.valueForDisplay(obj).toString();
}