Refactor SourceProvider creation to consistently use MappingLookup (#128213)

This change updates the code to always create SourceProvider instances through MappingLookup, so callers are no longer exposed to the underlying source format (synthetic or stored).
It also aligns source filtering behaviour between SourceProvider and SourceLoader, ensuring that filters are applied consistently in both paths.

This is needed so that source filtering can be applied earlier in the fetch phase, for example when constructing a synthetic source.
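In practice, the two format-specific factory methods collapse into a single lookup-driven entry point. A minimal before/after sketch of the API change (the variables here are illustrative placeholders, not code from the diff):

    // Before: callers chose the source format themselves
    SourceProvider provider = isSourceSynthetic
        ? SourceProvider.fromSyntheticSource(mapping, null, metrics)
        : SourceProvider.fromStoredFields();

    // After: the MappingLookup decides the format, and an optional SourceFilter can be pushed down
    SourceProvider provider = SourceProvider.fromLookup(mappingLookup, null, metrics);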
Jim Ferenczi 2025-05-22 14:45:13 +01:00 committed by GitHub
parent 7eed09644c
commit 54af815ad9
42 changed files with 205 additions and 202 deletions


@@ -30,8 +30,10 @@ import org.elasticsearch.index.fielddata.FieldDataContext;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.SourceFieldMetrics;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.plugins.PluginsLoader;
@@ -90,7 +92,7 @@ public class ScriptScoreBenchmark {
private final SearchLookup lookup = new SearchLookup(
fieldTypes::get,
(mft, lookup, fdo) -> mft.fielddataBuilder(FieldDataContext.noRuntimeFields("benchmark")).build(fieldDataCache, breakerService),
SourceProvider.fromStoredFields()
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
);
@Param({ "expression", "metal", "painless_cast", "painless_def" })


@@ -0,0 +1,5 @@
pr: 128213
summary: Refactor `SourceProvider` creation to consistently use `MappingLookup`
area: Mapping
type: enhancement
issues: []


@@ -107,7 +107,8 @@ public class Murmur3FieldMapperTests extends MapperTestCase {
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0);
Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
.getSource(ir.leaves().get(0), 0);
docValueFetcher.setNextReader(ir.leaves().get(0));
nativeFetcher.setNextReader(ir.leaves().get(0));
List<Object> fromDocValues = docValueFetcher.fetchValues(s, 0, new ArrayList<>());


@@ -66,6 +66,9 @@ public abstract class StoredFieldLoader {
* otherwise, uses the heuristic defined in {@link StoredFieldLoader#reader(LeafReaderContext, int[])}.
*/
public static StoredFieldLoader create(boolean loadSource, Set<String> fields, boolean forceSequentialReader) {
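// Neither _source nor any stored fields were requested; return the no-op empty() loader.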
if (loadSource == false && fields.isEmpty()) {
return StoredFieldLoader.empty();
}
List<String> fieldsToLoad = fieldsToLoad(loadSource, fields);
return new StoredFieldLoader() {
@Override


@@ -19,6 +19,7 @@ import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.internal.Client;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
@@ -39,6 +40,7 @@ import org.elasticsearch.search.NestedDocuments;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.lookup.LeafFieldLookupProvider;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.xcontent.XContentParserConfiguration;
@@ -162,8 +164,8 @@ public class FilteredSearchExecutionContext extends SearchExecutionContext {
}
@Override
public SourceLoader newSourceLoader(boolean forceSyntheticSource) {
return in.newSourceLoader(forceSyntheticSource);
public SourceLoader newSourceLoader(@Nullable SourceFilter filter, boolean forceSyntheticSource) {
return in.newSourceLoader(filter, forceSyntheticSource);
}
@Override


@@ -25,6 +25,7 @@ import org.elasticsearch.client.internal.Client;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexSortConfig;
@@ -57,6 +58,7 @@ import org.elasticsearch.search.NestedDocuments;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.lookup.LeafFieldLookupProvider;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.lookup.SourceProvider;
import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.xcontent.XContentParserConfiguration;
@@ -439,15 +441,15 @@ public class SearchExecutionContext extends QueryRewriteContext {
/**
* Build something to load source {@code _source}.
*/
public SourceLoader newSourceLoader(boolean forceSyntheticSource) {
public SourceLoader newSourceLoader(@Nullable SourceFilter filter, boolean forceSyntheticSource) {
if (forceSyntheticSource) {
return new SourceLoader.Synthetic(
null,
filter,
() -> mappingLookup.getMapping().syntheticFieldLoader(null),
mapperMetrics.sourceFieldMetrics()
);
}
return mappingLookup.newSourceLoader(null, mapperMetrics.sourceFieldMetrics());
return mappingLookup.newSourceLoader(filter, mapperMetrics.sourceFieldMetrics());
}
/**
@@ -506,9 +508,7 @@ public class SearchExecutionContext extends QueryRewriteContext {
}
public SourceProvider createSourceProvider() {
return isSourceSynthetic()
? SourceProvider.fromSyntheticSource(mappingLookup.getMapping(), null, mapperMetrics.sourceFieldMetrics())
: SourceProvider.fromStoredFields();
return SourceProvider.fromLookup(mappingLookup, null, mapperMetrics.sourceFieldMetrics());
}
/**


@@ -70,6 +70,7 @@ import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchContextId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
@@ -943,8 +944,8 @@ final class DefaultSearchContext extends SearchContext {
}
@Override
public SourceLoader newSourceLoader() {
return searchExecutionContext.newSourceLoader(request.isForceSyntheticSource());
public SourceLoader newSourceLoader(@Nullable SourceFilter filter) {
return searchExecutionContext.newSourceLoader(filter, request.isForceSyntheticSource());
}
@Override


@@ -45,9 +45,9 @@ public class FetchContext {
/**
* Create a FetchContext based on a SearchContext
*/
public FetchContext(SearchContext searchContext) {
public FetchContext(SearchContext searchContext, SourceLoader sourceLoader) {
this.searchContext = searchContext;
this.sourceLoader = searchContext.newSourceLoader();
this.sourceLoader = sourceLoader;
this.storedFieldsContext = buildStoredFieldsContext(searchContext);
this.fetchSourceContext = buildFetchSourceContext(searchContext);
}


@@ -111,9 +111,8 @@ public final class FetchPhase {
}
private SearchHits buildSearchHits(SearchContext context, int[] docIdsToLoad, Profiler profiler, RankDocShardInfo rankDocs) {
FetchContext fetchContext = new FetchContext(context);
SourceLoader sourceLoader = context.newSourceLoader();
SourceLoader sourceLoader = context.newSourceLoader(null);
FetchContext fetchContext = new FetchContext(context, sourceLoader);
PreloadedSourceProvider sourceProvider = new PreloadedSourceProvider();
PreloadedFieldLookupProvider fieldLookupProvider = new PreloadedFieldLookupProvider();


@@ -14,6 +14,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.IdLoader;
@@ -33,6 +34,7 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
@@ -453,8 +455,8 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
public SourceLoader newSourceLoader() {
return in.newSourceLoader();
public SourceLoader newSourceLoader(@Nullable SourceFilter filter) {
return in.newSourceLoader(filter);
}
@Override


@@ -41,6 +41,7 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QueryPhase;
import org.elasticsearch.search.query.QuerySearchResult;
@@ -441,7 +442,7 @@ public abstract class SearchContext implements Releasable {
/**
* Build something to load source {@code _source}.
*/
public abstract SourceLoader newSourceLoader();
public abstract SourceLoader newSourceLoader(@Nullable SourceFilter sourceFilter);
public abstract IdLoader newIdLoader();
}


@@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import org.elasticsearch.index.mapper.SourceLoader;
import java.io.IOException;
import java.util.Map;
/**
* A {@link SourceProvider} that loads _source from a concurrent search.
*
* NOTE: This is written under the assumption that individual segments are accessed by a single
* thread, even if separate segments may be searched concurrently. If we ever implement
* within-segment concurrency this will have to work entirely differently.
*/
class ConcurrentSegmentSourceProvider implements SourceProvider {
private final SourceLoader sourceLoader;
private final StoredFieldLoader storedFieldLoader;
private final Map<Object, Leaf> leaves = ConcurrentCollections.newConcurrentMap();
ConcurrentSegmentSourceProvider(SourceLoader loader, boolean loadSource) {
this.sourceLoader = loader;
// we force a sequential reader here since it is used during query execution where documents are scanned sequentially
this.storedFieldLoader = StoredFieldLoader.create(loadSource, sourceLoader.requiredStoredFields(), true);
}
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
final Object id = ctx.id();
var leaf = leaves.get(id);
if (leaf == null) {
leaf = new Leaf(sourceLoader.leaf(ctx.reader(), null), storedFieldLoader.getLoader(ctx, null));
var existing = leaves.put(id, leaf);
assert existing == null : "unexpected source provider [" + existing + "]";
}
return leaf.getSource(ctx, doc);
}
private static class Leaf implements SourceProvider {
private final SourceLoader.Leaf sourceLoader;
private final LeafStoredFieldLoader storedFieldLoader;
int doc = -1;
Source source = null;
private Leaf(SourceLoader.Leaf sourceLoader, LeafStoredFieldLoader storedFieldLoader) {
this.sourceLoader = sourceLoader;
this.storedFieldLoader = storedFieldLoader;
}
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
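// Cache the most recently loaded document: fetch code paths typically read the
// same document several times in a row before advancing.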
if (this.doc == doc) {
return source;
}
this.doc = doc;
storedFieldLoader.advanceTo(doc);
return source = sourceLoader.source(storedFieldLoader, doc);
}
}
}


@@ -10,10 +10,8 @@
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import org.elasticsearch.index.mapper.Mapping;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.SourceFieldMetrics;
import org.elasticsearch.index.mapper.SourceLoader;
import java.io.IOException;
@@ -28,27 +26,14 @@ public interface SourceProvider {
Source getSource(LeafReaderContext ctx, int doc) throws IOException;
/**
* A SourceProvider that loads source from stored fields
* A SourceProvider that delegates source loading to the provided {@link MappingLookup}.
*
* The returned SourceProvider is thread-safe across segments, in that it may be
* safely used by a searcher that searches different segments on different threads,
* but it is not safe to use this to access documents from the same segment across
* multiple threads.
*/
static SourceProvider fromStoredFields() {
StoredFieldLoader storedFieldLoader = StoredFieldLoader.sequentialSource();
return new StoredFieldSourceProvider(storedFieldLoader);
}
/**
* A SourceProvider that loads source from synthetic source
*
* The returned SourceProvider is thread-safe across segments, in that it may be
* safely used by a searcher that searches different segments on different threads,
* but it is not safe to use this to access documents from the same segment across
* multiple threads.
*/
static SourceProvider fromSyntheticSource(Mapping mapping, SourceFilter filter, SourceFieldMetrics metrics) {
return new SyntheticSourceProvider(new SourceLoader.Synthetic(filter, () -> mapping.syntheticFieldLoader(filter), metrics));
static SourceProvider fromLookup(MappingLookup lookup, SourceFilter filter, SourceFieldMetrics metrics) {
return new ConcurrentSegmentSourceProvider(lookup.newSourceLoader(filter, metrics), lookup.isSourceSynthetic() == false);
}
}
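As the test updates below show, former fromStoredFields() call sites now pass MappingLookup.EMPTY with no-op metrics to get the stored-fields behaviour, while call sites with a real mapping let the lookup decide. A short sketch of both patterns as they appear in this diff:

    // Stored-fields behaviour (the replacement for fromStoredFields()):
    SourceProvider stored = SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP);

    // Mapping-driven behaviour, stored or synthetic depending on the index:
    SourceProvider mapped = SourceProvider.fromLookup(
        mapperService.mappingLookup(),
        null,
        mapperService.getMapperMetrics().sourceFieldMetrics()
    );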


@@ -1,63 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import java.io.IOException;
import java.util.Map;
// NB This is written under the assumption that individual segments are accessed by a single
// thread, even if separate segments may be searched concurrently. If we ever implement
// within-segment concurrency this will have to work entirely differently.
class StoredFieldSourceProvider implements SourceProvider {
private final StoredFieldLoader storedFieldLoader;
private final Map<Object, LeafStoredFieldSourceProvider> leaves = ConcurrentCollections.newConcurrentMap();
StoredFieldSourceProvider(StoredFieldLoader storedFieldLoader) {
this.storedFieldLoader = storedFieldLoader;
}
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
final Object id = ctx.id();
var provider = leaves.get(id);
if (provider == null) {
provider = new LeafStoredFieldSourceProvider(storedFieldLoader.getLoader(ctx, null));
var existing = leaves.put(id, provider);
assert existing == null : "unexpected source provider [" + existing + "]";
}
return provider.getSource(doc);
}
private static class LeafStoredFieldSourceProvider {
final LeafStoredFieldLoader leafStoredFieldLoader;
int doc = -1;
Source source;
private LeafStoredFieldSourceProvider(LeafStoredFieldLoader leafStoredFieldLoader) {
this.leafStoredFieldLoader = leafStoredFieldLoader;
}
Source getSource(int doc) throws IOException {
if (this.doc == doc) {
return source;
}
this.doc = doc;
leafStoredFieldLoader.advanceTo(doc);
return source = Source.fromBytes(leafStoredFieldLoader.source());
}
}
}


@@ -1,61 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the "Elastic License
* 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
* Public License v 1"; you may not use this file except in compliance with, at
* your election, the "Elastic License 2.0", the "GNU Affero General Public
* License v3.0 only", or the "Server Side Public License, v 1".
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader;
import org.elasticsearch.index.fieldvisitor.StoredFieldLoader;
import org.elasticsearch.index.mapper.SourceLoader;
import java.io.IOException;
import java.util.Map;
// NB This is written under the assumption that individual segments are accessed by a single
// thread, even if separate segments may be searched concurrently. If we ever implement
// within-segment concurrency this will have to work entirely differently.
class SyntheticSourceProvider implements SourceProvider {
private final SourceLoader sourceLoader;
private final Map<Object, SyntheticSourceLeafLoader> leaves = ConcurrentCollections.newConcurrentMap();
SyntheticSourceProvider(SourceLoader sourceLoader) {
this.sourceLoader = sourceLoader;
}
@Override
public Source getSource(LeafReaderContext ctx, int doc) throws IOException {
final Object id = ctx.id();
var provider = leaves.get(id);
if (provider == null) {
provider = new SyntheticSourceLeafLoader(ctx);
var existing = leaves.put(id, provider);
assert existing == null : "unexpected source provider [" + existing + "]";
}
return provider.getSource(doc);
}
private class SyntheticSourceLeafLoader {
private final LeafStoredFieldLoader leafLoader;
private final SourceLoader.Leaf leaf;
SyntheticSourceLeafLoader(LeafReaderContext ctx) throws IOException {
this.leafLoader = (sourceLoader.requiredStoredFields().isEmpty())
? StoredFieldLoader.empty().getLoader(ctx, null)
: StoredFieldLoader.create(false, sourceLoader.requiredStoredFields()).getLoader(ctx, null);
this.leaf = sourceLoader.leaf(ctx.reader(), null);
}
Source getSource(int doc) throws IOException {
leafLoader.advanceTo(doc);
return leaf.source(leafLoader, doc);
}
}
}


@@ -14,6 +14,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.IdLoader;
@@ -41,6 +42,7 @@ import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchContextId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
@@ -536,7 +538,7 @@ public class RankSearchContext extends SearchContext {
}
@Override
public SourceLoader newSourceLoader() {
public SourceLoader newSourceLoader(@Nullable SourceFilter filter) {
throw new UnsupportedOperationException();
}


@@ -346,7 +346,7 @@ public class CompositeRuntimeFieldTests extends MapperServiceTestCase {
(mft, lookupSupplier, fdo) -> mft.fielddataBuilder(
new FieldDataContext("test", null, lookupSupplier, mapperService.mappingLookup()::sourcePaths, fdo)
).build(null, null),
SourceProvider.fromStoredFields()
SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
);
LeafSearchLookup leafSearchLookup = searchLookup.getLeafSearchLookup(reader.leaves().get(0));


@@ -106,7 +106,11 @@ public class DateFieldScriptTests extends FieldScriptTestCase<DateFieldScript.Fa
DateFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
new SearchLookup(
field -> null,
(ft, lookup, fdt) -> null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
),
DateFormatter.forPattern("epoch_millis"),
OnScriptError.FAIL
);


@@ -103,7 +103,11 @@ public class DoubleFieldScriptTests extends FieldScriptTestCase<DoubleFieldScrip
DoubleFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
new SearchLookup(
field -> null,
(ft, lookup, fdt) -> null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
),
OnScriptError.FAIL
);
DoubleFieldScript doubleFieldScript = leafFactory.newInstance(reader.leaves().get(0));


@@ -104,7 +104,11 @@ public class IpFieldScriptTests extends FieldScriptTestCase<IpFieldScript.Factor
IpFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
new SearchLookup(
field -> null,
(ft, lookup, fdt) -> null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
),
OnScriptError.FAIL
);
IpFieldScript ipFieldScript = leafFactory.newInstance(reader.leaves().get(0));


@@ -103,7 +103,11 @@ public class LongFieldScriptTests extends FieldScriptTestCase<LongFieldScript.Fa
LongFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
new SearchLookup(
field -> null,
(ft, lookup, fdt) -> null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
),
OnScriptError.FAIL
);
LongFieldScript longFieldScript = leafFactory.newInstance(reader.leaves().get(0));


@@ -67,7 +67,7 @@ public class PlaceHolderFieldMapperTests extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
SourceProvider.fromStoredFields()
SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
);
SearchExecutionContext searchExecutionContext = createSearchExecutionContext(mapperService);
FieldFetcher fieldFetcher = FieldFetcher.create(


@@ -77,7 +77,7 @@ public class ProvidedIdFieldMapperTests extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
SourceProvider.fromStoredFields()
SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
);
SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class);
when(searchExecutionContext.lookup()).thenReturn(lookup);


@@ -408,7 +408,7 @@ public abstract class RangeFieldMapperTests extends MapperTestCase {
iw.addDocument(doc);
iw.close();
try (DirectoryReader reader = DirectoryReader.open(directory)) {
SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), null, SourceFieldMetrics.NOOP);
SourceProvider provider = SourceProvider.fromLookup(mapper.mappers(), null, SourceFieldMetrics.NOOP);
Source syntheticSource = provider.getSource(getOnlyLeafReader(reader).getContext(), 0);
return syntheticSource;


@@ -45,11 +45,7 @@ public class SourceFieldMetricsTests extends MapperServiceTestCase {
iw.addDocument(doc);
iw.close();
try (DirectoryReader reader = DirectoryReader.open(directory)) {
SourceProvider provider = SourceProvider.fromSyntheticSource(
mapper.mapping(),
null,
createTestMapperMetrics().sourceFieldMetrics()
);
SourceProvider provider = SourceProvider.fromLookup(mapper.mappers(), null, createTestMapperMetrics().sourceFieldMetrics());
Source synthetic = provider.getSource(getOnlyLeafReader(reader).getContext(), 0);
assertEquals(synthetic.source().get("kwd"), "foo");
}


@@ -134,7 +134,11 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
new SearchLookup(
field -> null,
(ft, lookup, fdt) -> null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
),
OnScriptError.FAIL
);
StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0));
@@ -164,7 +168,11 @@ public class StringFieldScriptTests extends FieldScriptTestCase<StringFieldScrip
StringFieldScript.LeafFactory leafFactory = fromSource().newFactory(
"field",
Collections.emptyMap(),
new SearchLookup(field -> null, (ft, lookup, fdt) -> null, SourceProvider.fromStoredFields()),
new SearchLookup(
field -> null,
(ft, lookup, fdt) -> null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
),
OnScriptError.FAIL
);
StringFieldScript stringFieldScript = leafFactory.newInstance(reader.leaves().get(0));


@@ -1300,7 +1300,7 @@ public class TextFieldMapperTests extends MapperTestCase {
MappedFieldType ft = mapperService.fieldType("field");
SourceProvider sourceProvider = mapperService.mappingLookup().isSourceSynthetic() ? (ctx, doc) -> {
throw new IllegalArgumentException("Can't load source in scripts in synthetic mode");
} : SourceProvider.fromStoredFields();
} : SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics());
SearchLookup searchLookup = new SearchLookup(null, null, sourceProvider);
IndexFieldData<?> sfd = ft.fielddataBuilder(
new FieldDataContext("", null, () -> searchLookup, Set::of, MappedFieldType.FielddataOperation.SCRIPT)


@@ -1760,7 +1760,8 @@ public class DenseVectorFieldMapperTests extends MapperTestCase {
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0);
Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
.getSource(ir.leaves().get(0), 0);
nativeFetcher.setNextReader(ir.leaves().get(0));
List<Object> fromNative = nativeFetcher.fetchValues(s, 0, new ArrayList<>());
DenseVectorFieldType denseVectorFieldType = (DenseVectorFieldType) ft;


@@ -29,8 +29,9 @@ public class StoredFieldsSpecTests extends ESTestCase {
public void testDefaults() {
SearchSourceBuilder search = new SearchSourceBuilder();
var context = searchContext(search);
// defaults - return source and metadata fields
FetchContext fc = new FetchContext(searchContext(search));
FetchContext fc = new FetchContext(context, context.newSourceLoader(null));
FetchSubPhaseProcessor sourceProcessor = new FetchSourcePhase().getProcessor(fc);
assertNotNull(sourceProcessor);
@@ -52,7 +53,8 @@ public class StoredFieldsSpecTests extends ESTestCase {
public void testStoredFieldsDisabled() {
SearchSourceBuilder search = new SearchSourceBuilder();
search.storedField("_none_");
FetchContext fc = new FetchContext(searchContext(search));
var context = searchContext(search);
FetchContext fc = new FetchContext(context, context.newSourceLoader(null));
assertNull(new StoredFieldsPhase().getProcessor(fc));
assertNull(new FetchSourcePhase().getProcessor(fc));
@@ -61,7 +63,8 @@ public class StoredFieldsSpecTests extends ESTestCase {
public void testScriptFieldsEnableMetadata() {
SearchSourceBuilder search = new SearchSourceBuilder();
search.scriptField("field", new Script("script"));
FetchContext fc = new FetchContext(searchContext(search));
var context = searchContext(search);
FetchContext fc = new FetchContext(context, null);
FetchSubPhaseProcessor subPhaseProcessor = new ScriptFieldsPhase().getProcessor(fc);
assertNotNull(subPhaseProcessor);


@@ -253,7 +253,8 @@ public class FieldFetcherTests extends MapperServiceTestCase {
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext);
Source s = SourceProvider.fromStoredFields().getSource(readerContext, 0);
Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
.getSource(readerContext, 0);
Map<String, DocumentField> fetchedFields = fieldFetcher.fetch(s, 0);
assertThat(fetchedFields.size(), equalTo(5));
@@ -1539,7 +1540,11 @@ public class FieldFetcherTests extends MapperServiceTestCase {
IndexSearcher searcher = newSearcher(iw);
LeafReaderContext readerContext = searcher.getIndexReader().leaves().get(0);
fieldFetcher.setNextReader(readerContext);
Source source = SourceProvider.fromStoredFields().getSource(readerContext, 0);
Source source = SourceProvider.fromLookup(
mapperService.mappingLookup(),
null,
mapperService.getMapperMetrics().sourceFieldMetrics()
).getSource(readerContext, 0);
Map<String, DocumentField> fields = fieldFetcher.fetch(source, 0);
assertEquals(1, fields.size());
DocumentField field = fields.get("runtime_field");


@@ -27,6 +27,8 @@ import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.SourceFieldMetrics;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@@ -46,7 +48,7 @@ public class SourceProviderTests extends ESTestCase {
try (IndexReader reader = iw.getReader()) {
LeafReaderContext readerContext = reader.leaves().get(0);
SourceProvider sourceProvider = SourceProvider.fromStoredFields();
SourceProvider sourceProvider = SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP);
Source source = sourceProvider.getSource(readerContext, 0);
assertNotNull(source.internalSourceRef());
@@ -121,7 +123,7 @@
}
private static CollectorManager<SourceAssertingCollector, ?> assertingCollectorManager() {
SourceProvider sourceProvider = SourceProvider.fromStoredFields();
SourceProvider sourceProvider = SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP);
return new CollectorManager<>() {
@Override
public SourceAssertingCollector newCollector() {


@@ -26,7 +26,9 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.index.mapper.GeoPointScriptFieldType;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.OnScriptError;
import org.elasticsearch.index.mapper.SourceFieldMetrics;
import org.elasticsearch.script.AbstractLongFieldScript;
import org.elasticsearch.script.GeoPointFieldScript;
import org.elasticsearch.script.Script;
@@ -93,7 +95,11 @@ public class GeoPointScriptFieldDistanceFeatureQueryTests extends AbstractScript
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
SearchLookup searchLookup = new SearchLookup(null, null, SourceProvider.fromStoredFields());
SearchLookup searchLookup = new SearchLookup(
null,
null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
);
Function<LeafReaderContext, GeoPointFieldScript> leafFactory = ctx -> new GeoPointFieldScript(
"test",
Map.of(),


@@ -18,7 +18,9 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.tests.index.RandomIndexWriter;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.OnScriptError;
import org.elasticsearch.index.mapper.SourceFieldMetrics;
import org.elasticsearch.script.AbstractLongFieldScript;
import org.elasticsearch.script.DateFieldScript;
import org.elasticsearch.script.Script;
@@ -71,7 +73,11 @@ public class LongScriptFieldDistanceFeatureQueryTests extends AbstractScriptFiel
iw.addDocument(List.of(new StoredField("_source", new BytesRef("{\"timestamp\": [1595432181351]}"))));
try (DirectoryReader reader = iw.getReader()) {
IndexSearcher searcher = newSearcher(reader);
SearchLookup searchLookup = new SearchLookup(null, null, SourceProvider.fromStoredFields());
SearchLookup searchLookup = new SearchLookup(
null,
null,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
);
Function<LeafReaderContext, AbstractLongFieldScript> leafFactory = ctx -> new DateFieldScript(
"test",
Map.of(),


@@ -287,7 +287,11 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC
}
protected static SearchExecutionContext mockContext(boolean allowExpensiveQueries, MappedFieldType mappedFieldType) {
return mockContext(allowExpensiveQueries, mappedFieldType, SourceProvider.fromStoredFields());
return mockContext(
allowExpensiveQueries,
mappedFieldType,
SourceProvider.fromLookup(MappingLookup.EMPTY, null, SourceFieldMetrics.NOOP)
);
}
protected static SearchExecutionContext mockContext(


@@ -858,7 +858,7 @@ public abstract class MapperServiceTestCase extends FieldTypeTestCase {
final String synthetic1;
final XContent xContent;
{
SourceProvider provider = SourceProvider.fromSyntheticSource(mapper.mapping(), filter, SourceFieldMetrics.NOOP);
SourceProvider provider = SourceProvider.fromLookup(mapper.mappers(), filter, SourceFieldMetrics.NOOP);
var source = provider.getSource(leafReader.getContext(), docId);
synthetic1 = source.internalSourceRef().utf8ToString();
xContent = source.sourceContentType().xContent();


@@ -545,7 +545,7 @@ public abstract class MapperTestCase extends MapperServiceTestCase {
SearchLookup lookup = new SearchLookup(
mapperService::fieldType,
fieldDataLookup(mapperService),
SourceProvider.fromStoredFields()
SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
);
ValueFetcher valueFetcher = new DocValueFetcher(format, lookup.getForField(ft, MappedFieldType.FielddataOperation.SEARCH));
IndexSearcher searcher = newSearcher(iw);
@@ -566,7 +566,7 @@
MappedFieldType ft = mapperService.fieldType("field");
SourceProvider sourceProvider = mapperService.mappingLookup().isSourceSynthetic() ? (ctx, doc) -> {
throw new IllegalArgumentException("Can't load source in scripts in synthetic mode");
} : SourceProvider.fromStoredFields();
} : SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics());
SearchLookup searchLookup = new SearchLookup(null, null, sourceProvider);
IndexFieldData<?> sfd = ft.fielddataBuilder(
new FieldDataContext(
@@ -865,7 +865,8 @@
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0);
Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
.getSource(ir.leaves().get(0), 0);
docValueFetcher.setNextReader(ir.leaves().get(0));
nativeFetcher.setNextReader(ir.leaves().get(0));
List<Object> fromDocValues = docValueFetcher.fetchValues(s, 0, new ArrayList<>());


@@ -523,7 +523,7 @@ public abstract class AggregatorTestCase extends ESTestCase {
when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0));
when(indexShard.indexSettings()).thenReturn(indexSettings);
when(ctx.indexShard()).thenReturn(indexShard);
when(ctx.newSourceLoader()).thenAnswer(inv -> searchExecutionContext.newSourceLoader(false));
when(ctx.newSourceLoader(null)).thenAnswer(inv -> searchExecutionContext.newSourceLoader(null, false));
when(ctx.newIdLoader()).thenReturn(IdLoader.fromLeafStoredFieldLoader());
var res = new SubSearchContext(ctx);
releasables.add(res); // TODO: nasty workaround for not getting the standard resource handling behavior of a real search context


@@ -114,7 +114,7 @@ public class HighlighterTestCase extends MapperServiceTestCase {
when(fetchContext.highlight()).thenReturn(search.highlighter().build(context));
when(fetchContext.parsedQuery()).thenReturn(new ParsedQuery(search.query().toQuery(context)));
when(fetchContext.getSearchExecutionContext()).thenReturn(context);
when(fetchContext.sourceLoader()).thenReturn(context.newSourceLoader(false));
when(fetchContext.sourceLoader()).thenReturn(context.newSourceLoader(null, false));
return fetchContext;
}


@@ -13,6 +13,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@@ -42,6 +43,7 @@ import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchContextId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.SourceFilter;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rank.context.QueryPhaseRankShardContext;
@@ -552,8 +554,8 @@ public class TestSearchContext extends SearchContext {
}
@Override
public SourceLoader newSourceLoader() {
return searchExecutionContext.newSourceLoader(false);
public SourceLoader newSourceLoader(@Nullable SourceFilter filter) {
return searchExecutionContext.newSourceLoader(filter, false);
}
@Override


@@ -380,7 +380,7 @@ public class EsPhysicalOperationProviders extends AbstractPhysicalOperationProvi
@Override
public SourceLoader newSourceLoader() {
return ctx.newSourceLoader(false);
return ctx.newSourceLoader(null, false);
}
@Override


@@ -167,7 +167,8 @@ public class OffsetSourceFieldMapperTests extends MapperTestCase {
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0);
Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
.getSource(ir.leaves().get(0), 0);
nativeFetcher.setNextReader(ir.leaves().get(0));
List<Object> fromNative = nativeFetcher.fetchValues(s, 0, new ArrayList<>());
assertThat(fromNative.size(), equalTo(1));


@@ -376,7 +376,8 @@ public class RankVectorsFieldMapperTests extends MapperTestCase {
ValueFetcher nativeFetcher = ft.valueFetcher(searchExecutionContext, format);
ParsedDocument doc = mapperService.documentMapper().parse(source);
withLuceneIndex(mapperService, iw -> iw.addDocuments(doc.docs()), ir -> {
Source s = SourceProvider.fromStoredFields().getSource(ir.leaves().get(0), 0);
Source s = SourceProvider.fromLookup(mapperService.mappingLookup(), null, mapperService.getMapperMetrics().sourceFieldMetrics())
.getSource(ir.leaves().get(0), 0);
nativeFetcher.setNextReader(ir.leaves().get(0));
List<Object> fromNative = nativeFetcher.fetchValues(s, 0, new ArrayList<>());
RankVectorsFieldMapper.RankVectorsFieldType denseVectorFieldType = (RankVectorsFieldMapper.RankVectorsFieldType) ft;