Bust the request cache when the mapping changes (#66295)

This makes sure that we only serve a hit from the request cache if it
was built using the same mapping and that the same mapping is used for
the entire "query phase" of the search.

Closes #62033
This commit is contained in:
Nik Everett 2020-12-23 13:19:02 -05:00 committed by GitHub
parent 11153fcb33
commit 3e3152406a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
47 changed files with 1142 additions and 543 deletions

View file

@ -13,9 +13,17 @@ stream's backing indices.
[source,console]
--------------------------------------------------
POST /my-index-000001/_reload_search_analyzers
POST /my-index-000001/_cache/clear?request=true
--------------------------------------------------
// TEST[setup:my_index]
IMPORTANT: After reloading the search analyzers, you should clear the request
cache to make sure it doesn't contain responses derived from the
previous versions of the analyzer.
// the need for this is tracked in https://github.com/elastic/elasticsearch/issues/66722
[discrete]
[[indices-reload-analyzers-api-request]]
=== {api-request-title}

View file

@ -32,14 +32,14 @@ Scripted queries that use the API calls which are non-deterministic, such as
The cache is smart -- it keeps the same _near real-time_ promise as uncached
search.
Cached results are invalidated automatically whenever the shard refreshes, but
only if the data in the shard has actually changed. In other words, you will
always get the same results from the cache as you would for an uncached search
request.
Cached results are invalidated automatically whenever the shard refreshes to
pick up changes to the documents or when you update the mapping. In other
words, you will always get the same results from the cache as you would for an
uncached search request.
The longer the refresh interval, the longer that cached entries will remain
valid. If the cache is full, the least recently used cache keys will be
evicted.
valid even if there are changes to the documents. If the cache is full, the
least recently used cache keys will be evicted.
The cache can be expired manually with the <<indices-clearcache,`clear-cache` API>>:

View file

@ -272,3 +272,72 @@ setup:
the_filter:
filters:
filters: []
---
"cache":
- skip:
version: " - 7.99.99"
reason: cache fixed in 8.0.0 to be backported to 7.11.0
- do:
bulk:
refresh: true
body:
- index:
_index: test_1
_id: 100
- int_field: 1
double_field: 1.0
string_field: foo bar
- do:
search:
index: test_1
body:
size: 0
aggs:
f:
filters:
filters:
foo:
match:
string_field: foo
foo_bar:
match:
string_field: foo bar
- match: { hits.total.value: 5 }
- length: { aggregations.f.buckets: 2 }
- match: { aggregations.f.buckets.foo.doc_count: 4 }
- match: { aggregations.f.buckets.foo_bar.doc_count: 1 }
# Modify the mapping configuration that generates queries. This should bust the cache.
- do:
indices.put_mapping:
index: test_1
body:
properties:
string_field:
type: keyword
split_queries_on_whitespace: true
# This should be entirely fresh because updating the mapping busted the cache.
- do:
search:
index: test_1
body:
size: 0
aggs:
f:
filters:
filters:
foo:
match:
string_field: foo
foo_bar:
match:
string_field: foo bar
- match: { hits.total.value: 5 }
- length: { aggregations.f.buckets: 2 }
- match: { aggregations.f.buckets.foo.doc_count: 4 }
- match: { aggregations.f.buckets.foo_bar.doc_count: 4 }

View file

@ -612,6 +612,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
indexCache.bitsetFilterCache(),
indexFieldData::getForField,
mapperService(),
mapperService().mappingLookup(),
similarityService(),
scriptService,
xContentRegistry,

View file

@ -115,7 +115,7 @@ public class DocumentMapper implements ToXContentFragment {
this.documentParser = documentParser;
this.indexSettings = indexSettings;
this.indexAnalyzers = indexAnalyzers;
this.fieldMappers = MappingLookup.fromMapping(this.mapping);
this.fieldMappers = MappingLookup.fromMapping(mapping, this::parse);
try {
mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);

View file

@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.common.regex.Regex;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@ -43,11 +44,16 @@ final class FieldTypeLookup {
* For convenience, the set of copied fields includes the field itself.
*/
private final Map<String, Set<String>> fieldToCopiedFields = new HashMap<>();
private final String type;
private final DynamicKeyFieldTypeLookup dynamicKeyLookup;
FieldTypeLookup(Collection<FieldMapper> fieldMappers,
FieldTypeLookup(
String type,
Collection<FieldMapper> fieldMappers,
Collection<FieldAliasMapper> fieldAliasMappers,
Collection<RuntimeFieldType> runtimeFieldTypes) {
Collection<RuntimeFieldType> runtimeFieldTypes
) {
this.type = type;
Map<String, DynamicKeyFieldMapper> dynamicKeyMappers = new HashMap<>();
for (FieldMapper fieldMapper : fieldMappers) {
@ -89,6 +95,10 @@ final class FieldTypeLookup {
* Returns the mapped field type for the given field name.
*/
MappedFieldType get(String field) {
if (field.equals(TypeFieldType.NAME)) {
return new TypeFieldType(type);
}
MappedFieldType fieldType = fullNameToFieldType.get(field);
if (fieldType != null) {
return fieldType;
@ -103,6 +113,10 @@ final class FieldTypeLookup {
* Returns a list of the full names of a simple match regex like pattern against full name and index name.
*/
Set<String> simpleMatchToFullName(String pattern) {
if (Regex.isSimpleMatchPattern(pattern) == false) {
// no wildcards
return Collections.singleton(pattern);
}
Set<String> fields = new HashSet<>();
for (String field : fullNameToFieldType.keySet()) {
if (Regex.simpleMatch(pattern, field)) {
@ -125,6 +139,9 @@ final class FieldTypeLookup {
* @return A set of paths in the _source that contain the field's values.
*/
Set<String> sourcePaths(String field) {
if (fullNameToFieldType.isEmpty()) {
return Set.of();
}
String resolvedField = field;
int lastDotIndex = field.lastIndexOf('.');
if (lastDotIndex > 0) {

View file

@ -26,7 +26,6 @@ import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.MappingMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
@ -132,7 +131,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
}
public boolean hasNested() {
return this.mapper != null && this.mapper.hasNestedObjects();
return mappingLookup().hasNested();
}
public IndexAnalyzers getIndexAnalyzers() {
@ -399,10 +398,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* Given the full name of a field, returns its {@link MappedFieldType}.
*/
public MappedFieldType fieldType(String fullName) {
if (fullName.equals(TypeFieldType.NAME)) {
return new TypeFieldType(this.mapper == null ? "_doc" : this.mapper.type());
}
return this.mapper == null ? null : this.mapper.mappers().fieldTypes().get(fullName);
return mappingLookup().fieldTypes().get(fullName);
}
/**
@ -410,19 +406,15 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* then the fields will be returned with a type prefix.
*/
public Set<String> simpleMatchToFullName(String pattern) {
if (Regex.isSimpleMatchPattern(pattern) == false) {
// no wildcards
return Collections.singleton(pattern);
}
return this.mapper == null ? Collections.emptySet() : this.mapper.mappers().fieldTypes().simpleMatchToFullName(pattern);
return mappingLookup().simpleMatchToFullName(pattern);
}
/**
* Given a field name, returns its possible paths in the _source. For example,
* the 'source path' for a multi-field is the path to its parent field.
* {@code volatile} read a (mostly) immutable snapshot current mapping.
*/
public Set<String> sourcePath(String fullName) {
return this.mapper == null ? Collections.emptySet() : this.mapper.mappers().fieldTypes().sourcePaths(fullName);
public MappingLookup mappingLookup() {
DocumentMapper mapper = this.mapper;
return mapper == null ? MappingLookup.EMPTY : mapper.mappers();
}
/**
@ -444,18 +436,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
* directly associated index-time analyzer
*/
public NamedAnalyzer indexAnalyzer(String field, Function<String, NamedAnalyzer> unindexedFieldAnalyzer) {
if (this.mapper == null) {
return unindexedFieldAnalyzer.apply(field);
}
return this.mapper.mappers().indexAnalyzer(field, unindexedFieldAnalyzer);
}
public boolean containsBrokenAnalysis(String field) {
NamedAnalyzer a = indexAnalyzer(field, f -> null);
if (a == null) {
return false;
}
return a.containsBrokenAnalysis();
return mappingLookup().indexAnalyzer(field, unindexedFieldAnalyzer);
}
@Override
@ -504,6 +485,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
reloadedAnalyzers.add(analyzerName);
}
}
// TODO this should bust the cache somehow. Tracked in https://github.com/elastic/elasticsearch/issues/66722
return reloadedAnalyzers;
}
}

View file

@ -28,10 +28,38 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Stream;
public final class MappingLookup {
/**
* A (mostly) immutable snapshot of the current mapping of an index with
* access to everything we need for the search phase.
*/
public class MappingLookup {
/**
* Key for the lookup to be used in caches.
*/
public static class CacheKey {
// Not constructible outside MappingLookup. Keys compare by object
// identity, so each mapping snapshot gets a distinct key and a mapping
// update automatically invalidates cache entries keyed on the old one.
private CacheKey() {}
}
/**
* A lookup representing an empty mapping.
*/
public static final MappingLookup EMPTY = new MappingLookup(
"_doc",
List.of(),
List.of(),
List.of(),
List.of(),
0,
soucreToParse -> null,
false
);
private final CacheKey cacheKey = new CacheKey();
/** Full field name to mapper */
private final Map<String, Mapper> fieldMappers;
private final Map<String, ObjectMapper> objectMappers;
@ -39,8 +67,10 @@ public final class MappingLookup {
private final FieldTypeLookup fieldTypeLookup;
private final int metadataFieldCount;
private final Map<String, NamedAnalyzer> indexAnalyzers = new HashMap<>();
private final Function<SourceToParse, ParsedDocument> documentParser;
private final boolean sourceEnabled;
public static MappingLookup fromMapping(Mapping mapping) {
public static MappingLookup fromMapping(Mapping mapping, Function<SourceToParse, ParsedDocument> documentParser) {
List<ObjectMapper> newObjectMappers = new ArrayList<>();
List<FieldMapper> newFieldMappers = new ArrayList<>();
List<FieldAliasMapper> newFieldAliasMappers = new ArrayList<>();
@ -52,8 +82,16 @@ public final class MappingLookup {
for (Mapper child : mapping.root) {
collect(child, newObjectMappers, newFieldMappers, newFieldAliasMappers);
}
return new MappingLookup(newFieldMappers, newObjectMappers, newFieldAliasMappers,
mapping.root.runtimeFieldTypes(), mapping.metadataMappers.length);
return new MappingLookup(
mapping.root().name(),
newFieldMappers,
newObjectMappers,
newFieldAliasMappers,
mapping.root.runtimeFieldTypes(),
mapping.metadataMappers.length,
documentParser,
mapping.metadataMapper(SourceFieldMapper.class).enabled()
);
}
private static void collect(Mapper mapper, Collection<ObjectMapper> objectMappers,
@ -74,11 +112,16 @@ public final class MappingLookup {
}
}
public MappingLookup(Collection<FieldMapper> mappers,
public MappingLookup(String type,
Collection<FieldMapper> mappers,
Collection<ObjectMapper> objectMappers,
Collection<FieldAliasMapper> aliasMappers,
Collection<RuntimeFieldType> runtimeFieldTypes,
int metadataFieldCount) {
int metadataFieldCount,
Function<SourceToParse, ParsedDocument> documentParser,
boolean sourceEnabled) {
this.documentParser = documentParser;
this.sourceEnabled = sourceEnabled;
Map<String, Mapper> fieldMappers = new HashMap<>();
Map<String, ObjectMapper> objects = new HashMap<>();
@ -113,7 +156,7 @@ public final class MappingLookup {
}
}
this.fieldTypeLookup = new FieldTypeLookup(mappers, aliasMappers, runtimeFieldTypes);
this.fieldTypeLookup = new FieldTypeLookup(type, mappers, aliasMappers, runtimeFieldTypes);
this.fieldMappers = Collections.unmodifiableMap(fieldMappers);
this.objectMappers = Collections.unmodifiableMap(objects);
@ -147,7 +190,7 @@ public final class MappingLookup {
return fieldMappers.values();
}
public void checkLimits(IndexSettings settings) {
void checkLimits(IndexSettings settings) {
checkFieldLimit(settings.getMappingTotalFieldsLimit());
checkObjectDepthLimit(settings.getMappingDepthLimit());
checkFieldNameLengthLimit(settings.getMappingFieldNameLengthLimit());
@ -234,4 +277,50 @@ public final class MappingLookup {
}
return field.substring(0, lastDot);
}
/**
 * Returns the full names of all mapped fields matching a simple regex-like
 * pattern (delegates to the field type lookup; a pattern without wildcards
 * is returned as-is).
 */
public Set<String> simpleMatchToFullName(String pattern) {
return fieldTypes().simpleMatchToFullName(pattern);
}
/**
 * Returns the mapped field type for the given full field name, or
 * {@code null} if the field is not mapped.
 */
public MappedFieldType getFieldType(String field) {
return fieldTypes().get(field);
}
/**
 * Given a concrete field name, return its paths in the _source.
 *
 * For most fields, the source path is the same as the field itself. However
 * there are cases where a field's values are found elsewhere in the _source:
 * - For a multi-field, the source path is the parent field.
 * - One field's content could have been copied to another through copy_to.
 *
 * @param field The field for which to look up the _source path. Note that the field
 * should be a concrete field and *not* an alias.
 * @return A set of paths in the _source that contain the field's values.
 */
public Set<String> sourcePaths(String field) {
// Delegates to the field type lookup, which resolves multi-field parents
// and copy_to sources.
return fieldTypes().sourcePaths(field);
}
/**
 * Parses a source document into a {@link ParsedDocument} using the mapping
 * this lookup was built from. For the {@link #EMPTY} lookup the injected
 * parser returns {@code null}.
 */
public ParsedDocument parseDocument(SourceToParse source) {
return documentParser.apply(source);
}
/**
 * Does this index have any mappings? Implemented as an identity check
 * against the shared {@link #EMPTY} sentinel instance.
 */
public boolean hasMappings() {
return this != EMPTY;
}
/**
 * Is the {@code _source} field enabled in this mapping? The value is
 * captured from the {@code SourceFieldMapper} when the lookup is built.
 */
public boolean isSourceEnabled() {
return sourceEnabled;
}
/**
 * Key for this mapping snapshot to be used in caches. Every
 * {@code MappingLookup} instance carries its own identity-based key, so a
 * mapping update yields a new key and stale cache entries stop matching.
 */
public CacheKey cacheKey() {
return cacheKey;
}
}

View file

@ -52,6 +52,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.RuntimeFieldType;
@ -90,6 +91,7 @@ public class QueryShardContext extends QueryRewriteContext {
private final IndexSettings indexSettings;
private final BigArrays bigArrays;
private final MapperService mapperService;
private final MappingLookup mappingLookup;
private final SimilarityService similarityService;
private final BitsetFilterCache bitsetFilterCache;
private final TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataService;
@ -121,6 +123,7 @@ public class QueryShardContext extends QueryRewriteContext {
BitsetFilterCache bitsetFilterCache,
TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataLookup,
MapperService mapperService,
MappingLookup mappingLookup,
SimilarityService similarityService,
ScriptService scriptService,
NamedXContentRegistry xContentRegistry,
@ -142,6 +145,7 @@ public class QueryShardContext extends QueryRewriteContext {
bitsetFilterCache,
indexFieldDataLookup,
mapperService,
mappingLookup,
similarityService,
scriptService,
xContentRegistry,
@ -169,6 +173,7 @@ public class QueryShardContext extends QueryRewriteContext {
source.bitsetFilterCache,
source.indexFieldDataService,
source.mapperService,
source.mappingLookup,
source.similarityService,
source.scriptService,
source.getXContentRegistry(),
@ -190,6 +195,7 @@ public class QueryShardContext extends QueryRewriteContext {
BitsetFilterCache bitsetFilterCache,
TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataLookup,
MapperService mapperService,
MappingLookup mappingLookup,
SimilarityService similarityService,
ScriptService scriptService,
NamedXContentRegistry xContentRegistry,
@ -207,6 +213,7 @@ public class QueryShardContext extends QueryRewriteContext {
this.shardRequestIndex = shardRequestIndex;
this.similarityService = similarityService;
this.mapperService = mapperService;
this.mappingLookup = mappingLookup;
this.bigArrays = bigArrays;
this.bitsetFilterCache = bitsetFilterCache;
this.indexFieldDataService = indexFieldDataLookup;
@ -230,7 +237,7 @@ public class QueryShardContext extends QueryRewriteContext {
}
public Similarity getSearchSimilarity() {
return similarityService != null ? similarityService.similarity(mapperService::fieldType) : null;
return similarityService != null ? similarityService.similarity(this::fieldType) : null;
}
public List<String> defaultFields() {
@ -274,16 +281,19 @@ public class QueryShardContext extends QueryRewriteContext {
return Map.copyOf(namedQueries);
}
/**
* Parse a document with current mapping.
*/
public ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException {
return mapperService.documentMapper() == null ? null : mapperService.documentMapper().parse(source);
return mappingLookup.parseDocument(source);
}
public boolean hasNested() {
return mapperService.hasNested();
return mappingLookup.hasNested();
}
public boolean hasMappings() {
return mapperService.documentMapper() != null;
return mappingLookup.hasMappings();
}
/**
@ -292,13 +302,13 @@ public class QueryShardContext extends QueryRewriteContext {
*/
public Set<String> simpleMatchToIndexNames(String pattern) {
if (runtimeMappings.isEmpty()) {
return mapperService.simpleMatchToFullName(pattern);
return mappingLookup.simpleMatchToFullName(pattern);
}
if (Regex.isSimpleMatchPattern(pattern) == false) {
// no wildcards
return Collections.singleton(pattern);
}
Set<String> matches = new HashSet<>(mapperService.simpleMatchToFullName(pattern));
Set<String> matches = new HashSet<>(mappingLookup.simpleMatchToFullName(pattern));
for (String name : runtimeMappings.keySet()) {
if (Regex.simpleMatch(pattern, name)) {
matches.add(name);
@ -329,11 +339,11 @@ public class QueryShardContext extends QueryRewriteContext {
private MappedFieldType fieldType(String name) {
MappedFieldType fieldType = runtimeMappings.get(name);
return fieldType == null ? mapperService.fieldType(name) : fieldType;
return fieldType == null ? mappingLookup.getFieldType(name) : fieldType;
}
public ObjectMapper getObjectMapper(String name) {
return mapperService.getObjectMapper(name);
return mappingLookup.objectMappers().get(name);
}
public boolean isMetadataField(String field) {
@ -341,11 +351,11 @@ public class QueryShardContext extends QueryRewriteContext {
}
public Set<String> sourcePath(String fullName) {
return mapperService.sourcePath(fullName);
return mappingLookup.sourcePaths(fullName);
}
public boolean isSourceEnabled() {
return mapperService.documentMapper().sourceMapper().enabled();
return mappingLookup.isSourceEnabled();
}
/**
@ -378,7 +388,7 @@ public class QueryShardContext extends QueryRewriteContext {
return new DelegatingAnalyzerWrapper(Analyzer.PER_FIELD_REUSE_STRATEGY) {
@Override
protected Analyzer getWrappedAnalyzer(String fieldName) {
return mapperService.indexAnalyzer(fieldName, unindexedFieldAnalyzer);
return mappingLookup.indexAnalyzer(fieldName, unindexedFieldAnalyzer);
}
};
}
@ -399,8 +409,7 @@ public class QueryShardContext extends QueryRewriteContext {
if (fieldMapping != null || allowUnmappedFields) {
return fieldMapping;
} else if (mapUnmappedFieldAsString) {
TextFieldMapper.Builder builder
= new TextFieldMapper.Builder(name, mapperService.getIndexAnalyzers());
TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name, getIndexAnalyzers());
return builder.build(new ContentPath(1)).fieldType();
} else {
throw new QueryShardException(this, "No field mapping can be found for the field with name [{}]", name);
@ -412,7 +421,8 @@ public class QueryShardContext extends QueryRewriteContext {
* backwards offsets in term vectors
*/
public boolean containsBrokenAnalysis(String field) {
return mapperService.containsBrokenAnalysis(field);
NamedAnalyzer a = mappingLookup.indexAnalyzer(field, f -> null);
return a == null ? false : a.containsBrokenAnalysis();
}
private SearchLookup lookup = null;
@ -603,10 +613,7 @@ public class QueryShardContext extends QueryRewriteContext {
return bigArrays;
}
private static Map<String, MappedFieldType> parseRuntimeMappings(
Map<String, Object> runtimeMappings,
MapperService mapperService
) {
private static Map<String, MappedFieldType> parseRuntimeMappings(Map<String, Object> runtimeMappings, MapperService mapperService) {
Map<String, MappedFieldType> runtimeFieldTypes = new HashMap<>();
if (runtimeMappings.isEmpty() == false) {
RuntimeFieldType.parseRuntimeFields(new HashMap<>(runtimeMappings), mapperService.parserContext(),
@ -614,4 +621,11 @@ public class QueryShardContext extends QueryRewriteContext {
}
return Collections.unmodifiableMap(runtimeFieldTypes);
}
/**
 * Cache key identifying the mapping snapshot this context was built with.
 * Used by the shard request cache so that entries computed against an
 * older mapping are not served after a mapping update.
 */
public MappingLookup.CacheKey mappingCacheKey() {
return mappingLookup.cacheKey();
}
}

View file

@ -41,6 +41,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.mapper.MappingLookup;
import java.io.Closeable;
import java.io.IOException;
@ -113,9 +114,9 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
}
BytesReference getOrCompute(CacheEntity cacheEntity, CheckedSupplier<BytesReference, IOException> loader,
DirectoryReader reader, BytesReference cacheKey) throws Exception {
MappingLookup.CacheKey mappingCacheKey, DirectoryReader reader, BytesReference cacheKey) throws Exception {
assert reader.getReaderCacheHelper() != null;
final Key key = new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey);
final Key key = new Key(cacheEntity, mappingCacheKey, reader.getReaderCacheHelper().getKey(), cacheKey);
Loader cacheLoader = new Loader(cacheEntity, loader);
BytesReference value = cache.computeIfAbsent(key, cacheLoader);
if (cacheLoader.isLoaded()) {
@ -128,6 +129,15 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
ElasticsearchDirectoryReader.addReaderCloseListener(reader, cleanupKey);
}
}
/*
* Note that we don't use a closed listener for the mapping. Instead
* we let cache entries for out of date mappings age out. We do this
* because we don't reference count the MappingLookup so we can't tell
* when one is no longer used. Mapping updates should be a lot less
* frequent than reader closes so this is probably ok. On the other
* hand, for read only indices mapping changes are, well, possible,
* and readers are never changed. Oh well.
*/
} else {
key.entity.onHit();
}
@ -140,9 +150,9 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
* @param reader the reader to invalidate the cache entry for
* @param cacheKey the cache key to invalidate
*/
void invalidate(CacheEntity cacheEntity, DirectoryReader reader, BytesReference cacheKey) {
void invalidate(CacheEntity cacheEntity, MappingLookup.CacheKey mappingCacheKey, DirectoryReader reader, BytesReference cacheKey) {
assert reader.getReaderCacheHelper() != null;
cache.invalidate(new Key(cacheEntity, reader.getReaderCacheHelper().getKey(), cacheKey));
cache.invalidate(new Key(cacheEntity, mappingCacheKey, reader.getReaderCacheHelper().getKey(), cacheKey));
}
private static class Loader implements CacheLoader<Key, BytesReference> {
@ -211,11 +221,13 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(Key.class);
public final CacheEntity entity; // use as identity equality
public final MappingLookup.CacheKey mappingCacheKey;
public final IndexReader.CacheKey readerCacheKey;
public final BytesReference value;
Key(CacheEntity entity, IndexReader.CacheKey readerCacheKey, BytesReference value) {
Key(CacheEntity entity, MappingLookup.CacheKey mappingCacheKey, IndexReader.CacheKey readerCacheKey, BytesReference value) {
this.entity = entity;
this.mappingCacheKey = Objects.requireNonNull(mappingCacheKey);
this.readerCacheKey = Objects.requireNonNull(readerCacheKey);
this.value = value;
}
@ -236,7 +248,8 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Key key = (Key) o;
if (Objects.equals(readerCacheKey, key.readerCacheKey) == false) return false;
if (mappingCacheKey.equals(key.mappingCacheKey) == false) return false;
if (readerCacheKey.equals(key.readerCacheKey) == false) return false;
if (!entity.getCacheIdentity().equals(key.entity.getCacheIdentity())) return false;
if (!value.equals(key.value)) return false;
return true;
@ -245,6 +258,7 @@ public final class IndicesRequestCache implements RemovalListener<IndicesRequest
@Override
public int hashCode() {
int result = entity.getCacheIdentity().hashCode();
result = 31 * result + mappingCacheKey.hashCode();
result = 31 * result + readerCacheKey.hashCode();
result = 31 * result + value.hashCode();
return result;

View file

@ -101,6 +101,7 @@ import org.elasticsearch.index.get.GetStats;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.merge.MergeStats;
import org.elasticsearch.index.query.CoordinatorRewriteContextProvider;
import org.elasticsearch.index.query.QueryBuilder;
@ -1408,12 +1409,18 @@ public class IndicesService extends AbstractLifecycleComponent
final DirectoryReader directoryReader = context.searcher().getDirectoryReader();
boolean[] loadedFromCache = new boolean[] { true };
BytesReference bytesReference = cacheShardLevelResult(context.indexShard(), directoryReader, request.cacheKey(),
BytesReference cacheKey = request.cacheKey();
BytesReference bytesReference = cacheShardLevelResult(
context.indexShard(),
context.getQueryShardContext().mappingCacheKey(),
directoryReader,
cacheKey,
out -> {
queryPhase.execute(context);
context.queryResult().writeToNoId(out);
loadedFromCache[0] = false;
});
}
);
if (loadedFromCache[0]) {
// restore the cached query result into the context
@ -1429,7 +1436,12 @@ public class IndicesService extends AbstractLifecycleComponent
// key invalidate the result in the thread that caused the timeout. This will end up to be simpler and eventually correct since
// running a search that times out concurrently will likely timeout again if it's run while we have this `stale` result in the
// cache. One other option is to not cache requests with a timeout at all...
indicesRequestCache.invalidate(new IndexShardCacheEntity(context.indexShard()), directoryReader, request.cacheKey());
indicesRequestCache.invalidate(
new IndexShardCacheEntity(context.indexShard()),
context.getQueryShardContext().mappingCacheKey(),
directoryReader,
cacheKey
);
if (logger.isTraceEnabled()) {
logger.trace("Query timed out, invalidating cache entry for request on shard [{}]:\n {}", request.shardId(),
request.source());
@ -1449,8 +1461,13 @@ public class IndicesService extends AbstractLifecycleComponent
* @param loader loads the data into the cache if needed
* @return the contents of the cache or the result of calling the loader
*/
private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader reader, BytesReference cacheKey,
CheckedConsumer<StreamOutput, IOException> loader) throws Exception {
private BytesReference cacheShardLevelResult(
IndexShard shard,
MappingLookup.CacheKey mappingCacheKey,
DirectoryReader reader,
BytesReference cacheKey,
CheckedConsumer<StreamOutput, IOException> loader
) throws Exception {
IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard);
CheckedSupplier<BytesReference, IOException> supplier = () -> {
/* BytesStreamOutput allows to pass the expected size but by default uses
@ -1468,7 +1485,7 @@ public class IndicesService extends AbstractLifecycleComponent
return out.bytes();
}
};
return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey);
return indicesRequestCache.getOrCompute(cacheEntity, supplier, mappingCacheKey, reader, cacheKey);
}
static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity {

View file

@ -562,8 +562,16 @@ public class MetadataRolloverServiceTests extends ESTestCase {
return null;
}
};
MappingLookup mappingLookup =
new MappingLookup(List.of(mockedTimestampField, dateFieldMapper), List.of(), List.of(), List.of(), 0);
MappingLookup mappingLookup = new MappingLookup(
"_doc",
List.of(mockedTimestampField, dateFieldMapper),
List.of(),
List.of(),
List.of(),
0,
null,
false
);
ClusterService clusterService = ClusterServiceUtils.createClusterService(testThreadPool);
Environment env = mock(Environment.class);

View file

@ -125,7 +125,7 @@ public class MetadataCreateIndexServiceTests extends ESTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build();
queryShardContext = new QueryShardContext(0, 0,
new IndexSettings(IndexMetadata.builder("test").settings(indexSettings).build(), indexSettings),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(),
null, null, () -> randomNonNegativeLong(), null, null, () -> true, null, emptyMap());
}

View file

@ -167,7 +167,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build();
QueryShardContext context = new QueryShardContext(0, 0,
new IndexSettings(IndexMetadata.builder("foo").settings(indexSettings).build(), indexSettings),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null,
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null,
xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap());
MappedFieldType ft = new DateFieldType("field");
String date = "2015-10-12T14:10:55";
@ -189,7 +189,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
.put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 1).build();
QueryShardContext context = new QueryShardContext(0, 0,
new IndexSettings(IndexMetadata.builder("foo").settings(indexSettings).build(), indexSettings),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(),
null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap());
MappedFieldType ft = new DateFieldType("field");
String date1 = "2015-10-12T14:10:55";
@ -233,7 +233,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase {
IndexSettings indexSettings = new IndexSettings(indexMetadata, settings);
QueryShardContext context = new QueryShardContext(0, 0, indexSettings,
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(),
null, null, () -> 0L, null, null, () -> true, null, emptyMap());
MappedFieldType ft = new DateFieldType("field");

View file

@ -1,139 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Collections;
public class DocumentFieldMapperTests extends LuceneTestCase {
private static class FakeAnalyzer extends Analyzer {
private final String output;
FakeAnalyzer(String output) {
this.output = output;
}
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new Tokenizer() {
boolean incremented = false;
final CharTermAttribute term = addAttribute(CharTermAttribute.class);
@Override
public boolean incrementToken() {
if (incremented) {
return false;
}
term.setLength(0).append(output);
incremented = true;
return true;
}
};
return new TokenStreamComponents(tokenizer);
}
}
static class FakeFieldType extends TermBasedFieldType {
private FakeFieldType(String name) {
super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap());
}
@Override
public ValueFetcher valueFetcher(QueryShardContext context, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return "fake";
}
}
static class FakeFieldMapper extends FieldMapper {
final String indexedValue;
FakeFieldMapper(FakeFieldType fieldType, String indexedValue) {
super(fieldType.name(), fieldType,
new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue)),
MultiFields.empty(), CopyTo.empty());
this.indexedValue = indexedValue;
}
@Override
protected void parseCreateField(ParseContext context) {
}
@Override
protected String contentType() {
return null;
}
@Override
public Builder getMergeBuilder() {
return null;
}
}
public void testAnalyzers() throws IOException {
FakeFieldType fieldType1 = new FakeFieldType("field1");
FieldMapper fieldMapper1 = new FakeFieldMapper(fieldType1, "index1");
FakeFieldType fieldType2 = new FakeFieldType("field2");
FieldMapper fieldMapper2 = new FakeFieldMapper(fieldType2, "index2");
MappingLookup mappingLookup = new MappingLookup(
Arrays.asList(fieldMapper1, fieldMapper2),
Collections.emptyList(),
Collections.emptyList(),
Collections.emptyList(),
0);
assertAnalyzes(mappingLookup.indexAnalyzer("field1", f -> null), "field1", "index1");
assertAnalyzes(mappingLookup.indexAnalyzer("field2", f -> null), "field2", "index2");
expectThrows(IllegalArgumentException.class,
() -> mappingLookup.indexAnalyzer("field3", f -> {
throw new IllegalArgumentException();
}).tokenStream("field3", "blah"));
}
private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {
try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) {
CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
assertTrue(tok.incrementToken());
assertEquals(output, term.toString());
}
}
}

View file

@ -36,13 +36,19 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
ObjectMapper objectMapper = createObjectMapper("some.path");
FieldAliasMapper aliasMapper = new FieldAliasMapper("path", "some.path", "field");
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
new MappingLookup(
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> new MappingLookup(
"_doc",
Collections.emptyList(),
singletonList(objectMapper),
singletonList(aliasMapper),
emptyList(),
0));
0,
null,
false
)
);
assertEquals("Alias [some.path] is defined both as an object and an alias", e.getMessage());
}
@ -51,14 +57,19 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
FieldMapper invalidField = new MockFieldMapper("invalid");
FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid", "invalid", "field");
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
new MappingLookup(
MapperParsingException e = expectThrows(
MapperParsingException.class,
() -> new MappingLookup(
"_doc",
Arrays.asList(field, invalidField),
emptyList(),
singletonList(invalidAlias),
emptyList(),
0));
0,
null,
false
)
);
assertEquals("Alias [invalid] is defined both as an alias and a concrete field", e.getMessage());
}
@ -68,11 +79,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
FieldAliasMapper invalidAlias = new FieldAliasMapper("invalid-alias", "invalid-alias", "alias");
MappingLookup mappers = new MappingLookup(
"_doc",
singletonList(field),
emptyList(),
Arrays.asList(alias, invalidAlias),
emptyList(),
0);
0,
null,
false
);
alias.validate(mappers);
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
@ -88,11 +103,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
MappingLookup mappers = new MappingLookup(
"_doc",
emptyList(),
emptyList(),
singletonList(invalidAlias),
emptyList(),
0);
0,
null,
false
);
invalidAlias.validate(mappers);
});
@ -105,11 +124,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
MappingLookup mappers = new MappingLookup(
"_doc",
emptyList(),
emptyList(),
singletonList(invalidAlias),
emptyList(),
0);
0,
null,
false
);
invalidAlias.validate(mappers);
});
@ -122,11 +145,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "nested.alias", "nested.field");
MappingLookup mappers = new MappingLookup(
"_doc",
singletonList(createFieldMapper("nested", "field")),
singletonList(objectMapper),
singletonList(aliasMapper),
emptyList(),
0);
0,
null,
false
);
aliasMapper.validate(mappers);
}
@ -135,11 +162,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
FieldAliasMapper aliasMapper = new FieldAliasMapper("alias", "object2.alias", "object1.field");
MappingLookup mappers = new MappingLookup(
"_doc",
List.of(createFieldMapper("object1", "field")),
List.of(createObjectMapper("object1"), createObjectMapper("object2")),
singletonList(aliasMapper),
emptyList(),
0);
0,
null,
false
);
aliasMapper.validate(mappers);
}
@ -149,11 +180,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
MappingLookup mappers = new MappingLookup(
"_doc",
singletonList(createFieldMapper("nested", "field")),
Collections.singletonList(objectMapper),
singletonList(aliasMapper),
emptyList(),
0);
0,
null,
false
);
aliasMapper.validate(mappers);
});
@ -168,11 +203,15 @@ public class FieldAliasMapperValidationTests extends ESTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
MappingLookup mappers = new MappingLookup(
"_doc",
singletonList(createFieldMapper("nested1", "field")),
List.of(createNestedObjectMapper("nested1"), createNestedObjectMapper("nested2")),
singletonList(aliasMapper),
emptyList(),
0);
0,
null,
false
);
aliasMapper.validate(mappers);
});

View file

@ -29,11 +29,7 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESTestCase;
import java.util.Collections;
import static java.util.Collections.emptyMap;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class FieldNamesFieldTypeTests extends ESTestCase {
@ -45,13 +41,10 @@ public class FieldNamesFieldTypeTests extends ESTestCase {
Settings settings = settings(Version.CURRENT).build();
IndexSettings indexSettings = new IndexSettings(
new IndexMetadata.Builder("foo").settings(settings).numberOfShards(1).numberOfReplicas(0).build(), settings);
MapperService mapperService = mock(MapperService.class);
when(mapperService.fieldType("_field_names")).thenReturn(fieldNamesFieldType);
when(mapperService.fieldType("field_name")).thenReturn(fieldType);
when(mapperService.simpleMatchToFullName("field_name")).thenReturn(Collections.singleton("field_name"));
MappingLookup mappingLookup = MappingLookupUtils.fromTypes(fieldNamesFieldType, fieldType);
QueryShardContext queryShardContext = new QueryShardContext(0, 0,
indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, mapperService,
indexSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, mappingLookup,
null, null, null, null, null, null, () -> 0L, null, null, () -> true, null, emptyMap());
Query termQuery = fieldNamesFieldType.termQuery("field_name", queryShardContext);
assertEquals(new TermQuery(new Term(FieldNamesFieldMapper.CONTENT_TYPE, "field_name")), termQuery);

View file

@ -30,15 +30,16 @@ import java.util.Set;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.equalTo;
public class FieldTypeLookupTests extends ESTestCase {
public void testEmpty() {
FieldTypeLookup lookup = new FieldTypeLookup(Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", Collections.emptyList(), Collections.emptyList(), Collections.emptyList());
assertNull(lookup.get("foo"));
Collection<String> names = lookup.simpleMatchToFullName("foo");
assertNotNull(names);
assertTrue(names.isEmpty());
assertThat(names, equalTo(Set.of("foo")));
assertEquals(0, size(lookup.filter(ft -> true)));
}
@ -47,7 +48,7 @@ public class FieldTypeLookupTests extends ESTestCase {
Collection<FieldAliasMapper> fieldAliases = singletonList(new FieldAliasMapper("alias", "alias", "test"));
Collection<RuntimeFieldType> runtimeFields = List.of(
new TestRuntimeField("runtime", "type"), new TestRuntimeField("field", "type"));
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(fieldMappers, fieldAliases, runtimeFields);
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", fieldMappers, fieldAliases, runtimeFields);
assertEquals(3, size(fieldTypeLookup.filter(ft -> true)));
for (MappedFieldType fieldType : fieldTypeLookup.filter(ft -> true)) {
if (fieldType.name().equals("test")) {
@ -76,7 +77,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testAddNewField() {
MockFieldMapper f = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(f), emptyList(), Collections.emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", Collections.singletonList(f), emptyList(), Collections.emptyList());
assertNull(lookup.get("bar"));
assertEquals(f.fieldType(), lookup.get("foo"));
assertEquals(1, size(lookup.filter(ft -> true)));
@ -86,7 +87,7 @@ public class FieldTypeLookupTests extends ESTestCase {
MockFieldMapper field = new MockFieldMapper("foo");
FieldAliasMapper alias = new FieldAliasMapper("alias", "alias", "foo");
FieldTypeLookup lookup = new FieldTypeLookup(Collections.singletonList(field), Collections.singletonList(alias),
FieldTypeLookup lookup = new FieldTypeLookup("_doc", Collections.singletonList(field), Collections.singletonList(alias),
Collections.emptyList());
MappedFieldType aliasType = lookup.get("alias");
@ -100,7 +101,7 @@ public class FieldTypeLookupTests extends ESTestCase {
FieldAliasMapper alias1 = new FieldAliasMapper("food", "food", "foo");
FieldAliasMapper alias2 = new FieldAliasMapper("barometer", "barometer", "bar");
FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field1, field2), Arrays.asList(alias1, alias2), Collections.emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", List.of(field1, field2), List.of(alias1, alias2), List.of());
Collection<String> names = lookup.simpleMatchToFullName("b*");
@ -117,7 +118,7 @@ public class FieldTypeLookupTests extends ESTestCase {
.addMultiField(new MockFieldMapper.Builder("field.subfield2"))
.build(new ContentPath());
FieldTypeLookup lookup = new FieldTypeLookup(singletonList(field), emptyList(), emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", singletonList(field), emptyList(), emptyList());
assertEquals(Set.of("field"), lookup.sourcePaths("field"));
assertEquals(Set.of("field"), lookup.sourcePaths("field.subfield1"));
@ -133,17 +134,25 @@ public class FieldTypeLookupTests extends ESTestCase {
.copyTo("field")
.build(new ContentPath());
FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(field, otherField), emptyList(), emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", Arrays.asList(field, otherField), emptyList(), emptyList());
assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field"));
assertEquals(Set.of("other_field", "field"), lookup.sourcePaths("field.subfield1"));
}
public void testTypeLookup() {
String type = randomAlphaOfLength(4);
assertThat(
((TypeFieldType) new FieldTypeLookup(type, List.of(), List.of(), List.of()).get(TypeFieldType.NAME)).getType(),
equalTo(type)
);
}
public void testRuntimeFieldsLookup() {
MockFieldMapper concrete = new MockFieldMapper("concrete");
TestRuntimeField runtime = new TestRuntimeField("runtime", "type");
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(concrete), emptyList(), List.of(runtime));
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(concrete), emptyList(), List.of(runtime));
assertThat(fieldTypeLookup.get("concrete"), instanceOf(MockFieldMapper.FakeFieldType.class));
assertThat(fieldTypeLookup.get("runtime"), instanceOf(TestRuntimeField.class));
assertEquals(2, size(fieldTypeLookup.filter(ft -> true)));
@ -157,7 +166,7 @@ public class FieldTypeLookupTests extends ESTestCase {
TestRuntimeField subfieldOverride = new TestRuntimeField("object.subfield", "type");
TestRuntimeField runtime = new TestRuntimeField("runtime", "type");
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field, concrete, subfield), emptyList(),
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(field, concrete, subfield), emptyList(),
List.of(fieldOverride, runtime, subfieldOverride));
assertThat(fieldTypeLookup.get("field"), instanceOf(TestRuntimeField.class));
assertThat(fieldTypeLookup.get("object.subfield"), instanceOf(TestRuntimeField.class));
@ -172,7 +181,7 @@ public class FieldTypeLookupTests extends ESTestCase {
TestRuntimeField field2 = new TestRuntimeField("field2", "type");
TestRuntimeField subfield = new TestRuntimeField("object.subfield", "type");
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field1, concrete), emptyList(), List.of(field2, subfield));
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(field1, concrete), emptyList(), List.of(field2, subfield));
{
Set<String> matches = fieldTypeLookup.simpleMatchToFullName("fie*");
assertEquals(2, matches.size());
@ -194,7 +203,7 @@ public class FieldTypeLookupTests extends ESTestCase {
TestRuntimeField field2 = new TestRuntimeField("field2", "type");
TestRuntimeField subfield = new TestRuntimeField("object.subfield", "type");
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup(List.of(field1, concrete), emptyList(), List.of(field2, subfield));
FieldTypeLookup fieldTypeLookup = new FieldTypeLookup("_doc", List.of(field1, concrete), emptyList(), List.of(field2, subfield));
{
Set<String> sourcePaths = fieldTypeLookup.sourcePaths("field1");
assertEquals(1, sourcePaths.size());

View file

@ -69,7 +69,7 @@ public class IndexFieldTypeTests extends ESTestCase {
IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY);
Predicate<String> indexNameMatcher = pattern -> Regex.simpleMatch(pattern, "index");
return new QueryShardContext(0, 0, indexSettings, null, null, null, null, null, null, xContentRegistry(), writableRegistry(),
return new QueryShardContext(0, 0, indexSettings, null, null, null, null, null, null, null, xContentRegistry(), writableRegistry(),
null, null, System::currentTimeMillis, null, indexNameMatcher, () -> true, null, emptyMap());
}
}

View file

@ -33,6 +33,7 @@ import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -46,6 +47,17 @@ public class MapperServiceTests extends MapperServiceTestCase {
assertThat("field was not created by mapping update", mapperService.fieldType("field0"), notNullValue());
}
public void testMappingLookup() throws IOException {
MapperService service = createMapperService(mapping(b -> {}));
MappingLookup oldLookup = service.mappingLookup();
assertThat(oldLookup.fieldTypes().get("cat"), nullValue());
merge(service, mapping(b -> b.startObject("cat").field("type", "keyword").endObject()));
MappingLookup newLookup = service.mappingLookup();
assertThat(newLookup.fieldTypes().get("cat"), not(nullValue()));
assertThat(oldLookup.fieldTypes().get("cat"), nullValue());
}
/**
* Test that we can have at least the number of fields in new mappings that are defined by "index.mapping.total_fields.limit".
* Any additional field should trigger an IllegalArgumentException.

View file

@ -19,10 +19,20 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.io.StringReader;
import java.util.Arrays;
import java.util.Collections;
import static org.hamcrest.CoreMatchers.instanceOf;
@ -30,8 +40,8 @@ import static org.hamcrest.CoreMatchers.instanceOf;
public class MappingLookupTests extends ESTestCase {
public void testOnlyRuntimeField() {
MappingLookup mappingLookup = new MappingLookup(Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
Collections.singletonList(new TestRuntimeField("test", "type")), 0);
MappingLookup mappingLookup = new MappingLookup("_doc", Collections.emptyList(), Collections.emptyList(), Collections.emptyList(),
Collections.singletonList(new TestRuntimeField("test", "type")), 0, null, false);
assertEquals(0, size(mappingLookup.fieldMappers()));
assertEquals(0, mappingLookup.objectMappers().size());
assertNull(mappingLookup.getMapper("test"));
@ -40,8 +50,8 @@ public class MappingLookupTests extends ESTestCase {
public void testRuntimeFieldLeafOverride() {
MockFieldMapper fieldMapper = new MockFieldMapper("test");
MappingLookup mappingLookup = new MappingLookup(Collections.singletonList(fieldMapper), Collections.emptyList(),
Collections.emptyList(), Collections.singletonList(new TestRuntimeField("test", "type")), 0);
MappingLookup mappingLookup = new MappingLookup("_doc", Collections.singletonList(fieldMapper), Collections.emptyList(),
Collections.emptyList(), Collections.singletonList(new TestRuntimeField("test", "type")), 0, null, false);
assertThat(mappingLookup.getMapper("test"), instanceOf(MockFieldMapper.class));
assertEquals(1, size(mappingLookup.fieldMappers()));
assertEquals(0, mappingLookup.objectMappers().size());
@ -53,8 +63,16 @@ public class MappingLookupTests extends ESTestCase {
MockFieldMapper fieldMapper = new MockFieldMapper("object.subfield");
ObjectMapper objectMapper = new ObjectMapper("object", "object", new Explicit<>(true, true), ObjectMapper.Nested.NO,
ObjectMapper.Dynamic.TRUE, Collections.singletonMap("object.subfield", fieldMapper), Version.CURRENT);
MappingLookup mappingLookup = new MappingLookup(Collections.singletonList(fieldMapper), Collections.singletonList(objectMapper),
Collections.emptyList(), Collections.singletonList(new TestRuntimeField("object.subfield", "type")), 0);
MappingLookup mappingLookup = new MappingLookup(
"_doc",
Collections.singletonList(fieldMapper),
Collections.singletonList(objectMapper),
Collections.emptyList(),
Collections.singletonList(new TestRuntimeField("object.subfield", "type")),
0,
null,
false
);
assertThat(mappingLookup.getMapper("object.subfield"), instanceOf(MockFieldMapper.class));
assertEquals(1, size(mappingLookup.fieldMappers()));
assertEquals(1, mappingLookup.objectMappers().size());
@ -62,6 +80,41 @@ public class MappingLookupTests extends ESTestCase {
assertEquals(1, size(mappingLookup.fieldTypes().filter(ft -> true)));
}
public void testAnalyzers() throws IOException {
FakeFieldType fieldType1 = new FakeFieldType("field1");
FieldMapper fieldMapper1 = new FakeFieldMapper(fieldType1, "index1");
FakeFieldType fieldType2 = new FakeFieldType("field2");
FieldMapper fieldMapper2 = new FakeFieldMapper(fieldType2, "index2");
MappingLookup mappingLookup = new MappingLookup(
"_doc",
Arrays.asList(fieldMapper1, fieldMapper2),
Collections.emptyList(),
Collections.emptyList(),
Collections.emptyList(),
0,
null,
false
);
assertAnalyzes(mappingLookup.indexAnalyzer("field1", f -> null), "field1", "index1");
assertAnalyzes(mappingLookup.indexAnalyzer("field2", f -> null), "field2", "index2");
expectThrows(IllegalArgumentException.class,
() -> mappingLookup.indexAnalyzer("field3", f -> {
throw new IllegalArgumentException();
}).tokenStream("field3", "blah"));
}
private void assertAnalyzes(Analyzer analyzer, String field, String output) throws IOException {
try (TokenStream tok = analyzer.tokenStream(field, new StringReader(""))) {
CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
assertTrue(tok.incrementToken());
assertEquals(output, term.toString());
}
}
private static int size(Iterable<?> iterable) {
int count = 0;
for (Object obj : iterable) {
@ -69,4 +122,76 @@ public class MappingLookupTests extends ESTestCase {
}
return count;
}
private static class FakeAnalyzer extends Analyzer {
private final String output;
FakeAnalyzer(String output) {
this.output = output;
}
@Override
protected TokenStreamComponents createComponents(String fieldName) {
Tokenizer tokenizer = new Tokenizer() {
boolean incremented = false;
final CharTermAttribute term = addAttribute(CharTermAttribute.class);
@Override
public boolean incrementToken() {
if (incremented) {
return false;
}
term.setLength(0).append(output);
incremented = true;
return true;
}
};
return new TokenStreamComponents(tokenizer);
}
}
static class FakeFieldType extends TermBasedFieldType {
private FakeFieldType(String name) {
super(name, true, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, Collections.emptyMap());
}
@Override
public ValueFetcher valueFetcher(QueryShardContext context, String format) {
throw new UnsupportedOperationException();
}
@Override
public String typeName() {
return "fake";
}
}
static class FakeFieldMapper extends FieldMapper {
final String indexedValue;
FakeFieldMapper(FakeFieldType fieldType, String indexedValue) {
super(fieldType.name(), fieldType,
new NamedAnalyzer("fake", AnalyzerScope.INDEX, new FakeAnalyzer(indexedValue)),
MultiFields.empty(), CopyTo.empty());
this.indexedValue = indexedValue;
}
@Override
protected void parseCreateField(ParseContext context) {
}
@Override
protected String contentType() {
return null;
}
@Override
public Builder getMergeBuilder() {
return null;
}
}
}

View file

@ -474,7 +474,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
IndexSearcher searcher = newSearcher(reader);
QueryShardContext context = new QueryShardContext(0, 0, indexSettings,
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, xContentRegistry(), writableRegistry(),
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null, xContentRegistry(), writableRegistry(),
null, null, () -> 0L, null, null, () -> true, null, emptyMap());
final int iters = 10;

View file

@ -213,7 +213,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase {
Settings indexSettings = Settings.builder()
.put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings);
return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null,
return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null,
xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap());
}

View file

@ -36,10 +36,8 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -56,12 +54,15 @@ import org.elasticsearch.index.fielddata.plain.AbstractLeafOrdinalsFieldData;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MappingLookupUtils;
import org.elasticsearch.index.mapper.MockFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.RuntimeFieldType;
import org.elasticsearch.index.mapper.TestRuntimeField;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.plugins.MapperPlugin;
@ -78,19 +79,22 @@ import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class QueryShardContextTests extends ESTestCase {
@ -204,6 +208,7 @@ public class QueryShardContextTests extends ESTestCase {
null,
null,
null,
null,
NamedXContentRegistry.EMPTY,
new NamedWriteableRegistry(Collections.emptyList()),
null,
@ -222,100 +227,101 @@ public class QueryShardContextTests extends ESTestCase {
}
public void testFielddataLookupSelfReference() {
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
QueryShardContext queryShardContext = createQueryShardContext(
// simulate a runtime field that depends on itself e.g. field: doc['field']
return leafLookup.doc().get(field).toString();
}));
runtimeField("field", leafLookup -> leafLookup.doc().get("field").toString())
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("field", queryShardContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: field -> field", iae.getMessage());
}
public void testFielddataLookupLooseLoop() {
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
QueryShardContext queryShardContext = createQueryShardContext(
// simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['1']
if (field.equals("4")) {
return leafLookup.doc().get("1").toString();
}
return leafLookup.doc().get(Integer.toString(Integer.parseInt(field) + 1)).toString();
}));
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("1").get(0).toString())
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 1", iae.getMessage());
}
public void testFielddataLookupTerminatesInLoop() {
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
QueryShardContext queryShardContext = createQueryShardContext(
// simulate a runtime field cycle: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4']
if (field.equals("4")) {
return leafLookup.doc().get("4").toString();
}
return leafLookup.doc().get(Integer.toString(Integer.parseInt(field) + 1)).toString();
}));
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("4").get(0).toString())
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 4", iae.getMessage());
}
public void testFielddataLookupSometimesLoop() throws IOException {
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
QueryShardContext queryShardContext = createQueryShardContext(
// simulate a runtime field cycle in the second doc: 1: doc['2'] 2: doc['3'] 3: doc['4'] 4: doc['4']
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", (leafLookup, docId) -> {
if (docId == 0) {
return field + "_" + docId;
} else {
assert docId == 1;
if (field.equals("field4")) {
return leafLookup.doc().get("field1").toString();
return "escape!";
}
int i = Integer.parseInt(field.substring(field.length() - 1));
return leafLookup.doc().get("field" + (i + 1)).toString();
}
}));
List<String> values = collect("field1", queryShardContext, new TermQuery(new Term("indexed_field", "first")));
assertEquals(List.of("field1_0"), values);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("field1", queryShardContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: field1 -> field2 -> field3 -> field4 -> field1",
iae.getMessage());
return leafLookup.doc().get("4").get(0).toString();
})
);
List<String> values = collect("1", queryShardContext, new TermQuery(new Term("indexed_field", "first")));
assertEquals(List.of("escape!"), values);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext));
assertEquals("Cyclic dependency detected while resolving runtime fields: 1 -> 2 -> 3 -> 4 -> 4", iae.getMessage());
}
public void testFielddataLookupBeyondMaxDepth() {
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
int i = Integer.parseInt(field);
return leafLookup.doc().get(Integer.toString(i + 1)).toString();
}));
QueryShardContext queryShardContext = createQueryShardContext(
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("5").get(0).toString()),
runtimeField("5", leafLookup -> leafLookup.doc().get("6").get(0).toString()),
runtimeField("6", leafLookup -> "cat")
);
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> collect("1", queryShardContext));
assertEquals("Field requires resolving too many dependent fields: 1 -> 2 -> 3 -> 4 -> 5 -> 6", iae.getMessage());
}
public void testFielddataLookupReferencesBelowMaxDepth() throws IOException {
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
int i = Integer.parseInt(field.substring(field.length() - 1));
if (i == 5) {
return "test";
} else {
ScriptDocValues<?> scriptDocValues = leafLookup.doc().get("field" + (i + 1));
return scriptDocValues.get(0).toString() + docId;
}
}));
assertEquals(List.of("test0000", "test1111"), collect("field1", queryShardContext));
QueryShardContext queryShardContext = createQueryShardContext(
runtimeField("1", leafLookup -> leafLookup.doc().get("2").get(0).toString()),
runtimeField("2", leafLookup -> leafLookup.doc().get("3").get(0).toString()),
runtimeField("3", leafLookup -> leafLookup.doc().get("4").get(0).toString()),
runtimeField("4", leafLookup -> leafLookup.doc().get("5").get(0).toString()),
runtimeField("5", (leafLookup, docId) -> "cat on doc " + docId)
);
assertEquals(List.of("cat on doc 0", "cat on doc 1"), collect("1", queryShardContext));
}
public void testFielddataLookupOneFieldManyReferences() throws IOException {
int numFields = randomIntBetween(5, 20);
QueryShardContext queryShardContext = createQueryShardContext("uuid", null, fieldTypeLookup((field, leafLookup, docId) -> {
if (field.equals("field")) {
List<RuntimeFieldType> fields = new ArrayList<>(numFields + 1);
fields.add(runtimeField("root", leafLookup -> {
StringBuilder value = new StringBuilder();
for (int i = 0; i < numFields; i++) {
value.append(leafLookup.doc().get("field" + i).get(0));
value.append(leafLookup.doc().get(i).get(0));
}
return value.toString();
} else {
return "test" + docId;
}
}));
StringBuilder expectedFirstDoc = new StringBuilder();
StringBuilder expectedSecondDoc = new StringBuilder();
StringBuilder expected = new StringBuilder();
for (int i = 0; i < numFields; i++) {
expectedFirstDoc.append("test0");
expectedSecondDoc.append("test1");
String fieldValue = Integer.toString(i);
fields.add(runtimeField(Integer.toString(i), leafLookup -> fieldValue));
expected.append(i);
}
assertEquals(List.of(expectedFirstDoc.toString(), expectedSecondDoc.toString()), collect("field", queryShardContext));
assertEquals(
List.of(expected.toString(), expected.toString()),
collect("root", createQueryShardContext("uuid", null, MappingLookupUtils.fromTypes(List.of(), fields), Map.of(), List.of()))
);
}
public void testSearchRequestRuntimeFields() {
@ -331,7 +337,7 @@ public class QueryShardContextTests extends ESTestCase {
QueryShardContext qsc = createQueryShardContext(
"uuid",
null,
Map.of("pig", new MockFieldMapper.FakeFieldType("pig"), "cat", new MockFieldMapper.FakeFieldType("cat")),
MappingLookupUtils.fromTypes(new MockFieldMapper.FakeFieldType("pig"), new MockFieldMapper.FakeFieldType("cat")),
runtimeMappings,
Collections.singletonList(new TestRuntimeField.Plugin()));
assertTrue(qsc.isFieldMapped("cat"));
@ -347,43 +353,26 @@ public class QueryShardContextTests extends ESTestCase {
}
public static QueryShardContext createQueryShardContext(String indexUuid, String clusterAlias) {
return createQueryShardContext(indexUuid, clusterAlias, name -> {
throw new UnsupportedOperationException();
});
return createQueryShardContext(indexUuid, clusterAlias, MappingLookup.EMPTY, Map.of(), List.of());
}
private static QueryShardContext createQueryShardContext(RuntimeFieldType... fieldTypes) {
return createQueryShardContext(
"uuid",
null,
MappingLookupUtils.fromTypes(List.of(), List.of(fieldTypes)),
Collections.emptyMap(),
Collections.emptyList()
);
}
private static QueryShardContext createQueryShardContext(
String indexUuid,
String clusterAlias,
Function<String, MappedFieldType> fieldTypeLookup
) {
return createQueryShardContext(indexUuid, clusterAlias, new HashMap<>() {
@Override
public MappedFieldType get(Object key) {
return fieldTypeLookup.apply(key.toString());
}
}, Collections.emptyMap(), Collections.emptyList());
}
private static QueryShardContext createQueryShardContext(
String indexUuid,
String clusterAlias,
Map<String, MappedFieldType> fieldTypeLookup,
MappingLookup mappingLookup,
Map<String, Object> runtimeMappings,
List<MapperPlugin> mapperPlugins
) {
MapperService mapperService = createMapperService(indexUuid, fieldTypeLookup, mapperPlugins);
final long nowInMillis = randomNonNegativeLong();
return new QueryShardContext(
0, 0, mapperService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE, null,
(mappedFieldType, idxName, searchLookup) -> mappedFieldType.fielddataBuilder(idxName, searchLookup).build(null, null),
mapperService, null, null, NamedXContentRegistry.EMPTY, new NamedWriteableRegistry(Collections.emptyList()),
null, null, () -> nowInMillis, clusterAlias, null, () -> true, null, runtimeMappings);
}
private static MapperService createMapperService(String indexUuid,
Map<String, MappedFieldType> fieldTypeLookup,
List<MapperPlugin> mapperPlugins) {
IndexMetadata.Builder indexMetadataBuilder = new IndexMetadata.Builder("index");
indexMetadataBuilder.settings(Settings.builder().put("index.version.created", Version.CURRENT)
.put("index.number_of_shards", 1)
@ -391,36 +380,69 @@ public class QueryShardContextTests extends ESTestCase {
.put(IndexMetadata.SETTING_INDEX_UUID, indexUuid)
);
IndexMetadata indexMetadata = indexMetadataBuilder.build();
IndexAnalyzers indexAnalyzers = new IndexAnalyzers(
Collections.singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, null)),
Collections.emptyMap(), Collections.emptyMap()
);
IndexSettings indexSettings = new IndexSettings(indexMetadata, Settings.EMPTY);
MapperService mapperService = createMapperService(indexSettings, mapperPlugins);
final long nowInMillis = randomNonNegativeLong();
return new QueryShardContext(
0,
0,
indexSettings,
BigArrays.NON_RECYCLING_INSTANCE,
null,
(mappedFieldType, idxName, searchLookup) -> mappedFieldType.fielddataBuilder(idxName, searchLookup).build(null, null),
mapperService,
mappingLookup,
null,
null,
NamedXContentRegistry.EMPTY,
new NamedWriteableRegistry(Collections.emptyList()),
null,
null,
() -> nowInMillis,
clusterAlias,
null,
() -> true,
null,
runtimeMappings
);
}
private static MapperService createMapperService(
IndexSettings indexSettings,
List<MapperPlugin> mapperPlugins
) {
IndexAnalyzers indexAnalyzers = new IndexAnalyzers(
singletonMap("default", new NamedAnalyzer("default", AnalyzerScope.INDEX, null)),
emptyMap(),
emptyMap()
);
IndicesModule indicesModule = new IndicesModule(mapperPlugins);
MapperRegistry mapperRegistry = indicesModule.getMapperRegistry();
SimilarityService similarityService = new SimilarityService(indexSettings, null, Collections.emptyMap());
return new MapperService(indexSettings, indexAnalyzers, NamedXContentRegistry.EMPTY, similarityService,
mapperRegistry, () -> {
throw new UnsupportedOperationException();
}, () -> true, null) {
@Override
public MappedFieldType fieldType(String name) {
return fieldTypeLookup.get(name);
Supplier<QueryShardContext> queryShardContextSupplier = () -> { throw new UnsupportedOperationException(); };
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers);
when(mapperService.parserContext()).thenReturn(new Mapper.TypeParser.ParserContext(
null,
mapperRegistry.getMapperParsers()::get,
mapperRegistry.getRuntimeFieldTypeParsers()::get,
indexSettings.getIndexVersionCreated(),
queryShardContextSupplier,
null,
null,
indexAnalyzers,
indexSettings,
() -> true,
false
));
return mapperService;
}
@Override
public Set<String> simpleMatchToFullName(String pattern) {
if (Regex.isMatchAllPattern(pattern)) {
return Collections.unmodifiableSet(fieldTypeLookup.keySet());
}
throw new UnsupportedOperationException();
}
};
private static RuntimeFieldType runtimeField(String name, Function<LeafSearchLookup, String> runtimeDocValues) {
return runtimeField(name, (leafLookup, docId) -> runtimeDocValues.apply(leafLookup));
}
private static Function<String, MappedFieldType> fieldTypeLookup(
TriFunction<String, LeafSearchLookup, Integer, String> runtimeDocValues) {
return name -> new TestRuntimeField(name, null) {
private static RuntimeFieldType runtimeField(String name, BiFunction<LeafSearchLookup, Integer, String> runtimeDocValues) {
return new TestRuntimeField(name, null) {
@Override
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName,
Supplier<SearchLookup> searchLookup) {
@ -460,7 +482,7 @@ public class QueryShardContextTests extends ESTestCase {
LeafSearchLookup leafLookup = searchLookup.get()
.getLeafSearchLookup(context);
leafLookup.setDocument(docId);
value = runtimeDocValues.apply(name, leafLookup, docId);
value = runtimeDocValues.apply(leafLookup, docId);
}
};
}

View file

@ -41,9 +41,28 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
public void testRewriteMissingField() throws Exception {
IndexService indexService = createIndex("test");
IndexReader reader = new MultiReader();
QueryRewriteContext context = new QueryShardContext(0, 0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE,
null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(),
null, new IndexSearcher(reader), null, null, null, () -> true, null, emptyMap());
QueryRewriteContext context = new QueryShardContext(
0,
0,
indexService.getIndexSettings(),
BigArrays.NON_RECYCLING_INSTANCE,
null,
null,
indexService.mapperService(),
indexService.mapperService().mappingLookup(),
null,
null,
xContentRegistry(),
writableRegistry(),
null,
new IndexSearcher(reader),
null,
null,
null,
() -> true,
null,
emptyMap()
);
RangeQueryBuilder range = new RangeQueryBuilder("foo");
assertEquals(Relation.DISJOINT, range.getRelation(context));
}
@ -60,7 +79,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
indexService.mapperService().merge("type",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
QueryRewriteContext context = new QueryShardContext(0, 0, indexService.getIndexSettings(), null, null, null,
indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(),
indexService.mapperService(), indexService.mapperService().mappingLookup(), null, null, xContentRegistry(), writableRegistry(),
null, null, null, null, null, () -> true, null, emptyMap());
RangeQueryBuilder range = new RangeQueryBuilder("foo");
// can't make assumptions on a missing reader, so it must return INTERSECT
@ -79,9 +98,28 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase {
indexService.mapperService().merge("type",
new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
IndexReader reader = new MultiReader();
QueryRewriteContext context = new QueryShardContext(0, 0, indexService.getIndexSettings(), BigArrays.NON_RECYCLING_INSTANCE,
null, null, indexService.mapperService(), null, null, xContentRegistry(), writableRegistry(),
null, new IndexSearcher(reader), null, null, null, () -> true, null, emptyMap());
QueryRewriteContext context = new QueryShardContext(
0,
0,
indexService.getIndexSettings(),
BigArrays.NON_RECYCLING_INSTANCE,
null,
null,
indexService.mapperService(),
indexService.mapperService().mappingLookup(),
null,
null,
xContentRegistry(),
writableRegistry(),
null,
new IndexSearcher(reader),
null,
null,
null,
() -> true,
null,
emptyMap()
);
RangeQueryBuilder range = new RangeQueryBuilder("foo");
// no values -> DISJOINT
assertEquals(Relation.DISJOINT, range.getRelation(context));

View file

@ -43,12 +43,16 @@ import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MappingLookupUtils;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
public class IndicesRequestCacheTests extends ESTestCase {
@ -62,6 +66,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
writer.addDocument(newDoc(0, "foo"));
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey();
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
AtomicBoolean indexShard = new AtomicBoolean(true);
@ -69,7 +74,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes);
BytesReference value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@ -80,14 +85,14 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache hit
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, reader, termBytes);
value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(1, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length());
assertEquals(1, cache.numRegisteredCloseListeners());
// Closing the cache doesn't modify an already returned CacheEntity
@ -103,7 +108,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(0, cache.count());
assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt());
assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes());
IOUtils.close(reader, writer, dir, cache);
assertEquals(0, cache.numRegisteredCloseListeners());
@ -111,6 +116,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
public void testCacheDifferentReaders() throws Exception {
IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey();
AtomicBoolean indexShard = new AtomicBoolean(true);
ShardRequestCache requestCacheStats = new ShardRequestCache();
Directory dir = newDirectory();
@ -131,33 +137,33 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes);
BytesReference value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertFalse(loader.loadedFromCache);
assertEquals(1, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
final int cacheSize = requestCacheStats.stats().getMemorySize().bytesAsInt();
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length());
final long cacheSize = requestCacheStats.stats().getMemorySize().getBytes();
assertEquals(1, cache.numRegisteredCloseListeners());
// cache the second
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(secondReader, 0);
value = cache.getOrCompute(entity, loader, secondReader, termBytes);
value = cache.getOrCompute(entity, loader, mappingKey, secondReader, termBytes);
assertEquals("bar", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertFalse(loader.loadedFromCache);
assertEquals(2, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > cacheSize + value.length());
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > cacheSize + value.length());
assertEquals(2, cache.numRegisteredCloseListeners());
secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(secondReader, 0);
value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes);
value = cache.getOrCompute(secondEntity, loader, mappingKey, secondReader, termBytes);
assertEquals("bar", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
@ -167,7 +173,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, reader, termBytes);
value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(2, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
@ -182,10 +188,9 @@ public class IndicesRequestCacheTests extends ESTestCase {
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(1, cache.count());
assertEquals(cacheSize, requestCacheStats.stats().getMemorySize().bytesAsInt());
assertEquals(cacheSize, requestCacheStats.stats().getMemorySize().getBytes());
assertEquals(1, cache.numRegisteredCloseListeners());
// release
if (randomBoolean()) {
secondReader.close();
@ -198,13 +203,93 @@ public class IndicesRequestCacheTests extends ESTestCase {
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(0, cache.count());
assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt());
assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes());
IOUtils.close(secondReader, writer, dir, cache);
assertEquals(0, cache.numRegisteredCloseListeners());
}
public void testCacheDifferentMapping() throws Exception {
IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
MappingLookup.CacheKey mappingKey1 = MappingLookupUtils.fromTypes().cacheKey();
MappingLookup.CacheKey mappingKey2 = MappingLookupUtils.fromTypes().cacheKey();
AtomicBoolean indexShard = new AtomicBoolean(true);
ShardRequestCache requestCacheStats = new ShardRequestCache();
Directory dir = newDirectory();
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
writer.addDocument(newDoc(1, "bar"));
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, mappingKey1, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertFalse(loader.loadedFromCache);
assertEquals(1, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length());
final long cacheSize = requestCacheStats.stats().getMemorySize().getBytes();
assertEquals(1, cache.numRegisteredCloseListeners());
// cache the second
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 1);
value = cache.getOrCompute(entity, loader, mappingKey2, reader, termBytes);
assertEquals("bar", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertFalse(loader.loadedFromCache);
assertEquals(2, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > cacheSize + value.length());
assertEquals(1, cache.numRegisteredCloseListeners());
secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 1);
value = cache.getOrCompute(secondEntity, loader, mappingKey2, reader, termBytes);
assertEquals("bar", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(2, cache.count());
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, mappingKey1, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(2, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(2, cache.count());
// Closing the cache doesn't change returned entities
if (randomBoolean()) {
reader.close();
} else {
indexShard.set(false); // closed shard but reader is still open
cache.clear(secondEntity);
}
cache.cleanCache();
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(0, cache.count());
assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes());
IOUtils.close(reader, writer, dir, cache);
assertEquals(0, cache.numRegisteredCloseListeners());
}
public void testEviction() throws Exception {
MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey();
final ByteSizeValue size;
{
IndicesRequestCache cache = new IndicesRequestCache(Settings.EMPTY);
@ -227,9 +312,9 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
Loader secondLoader = new Loader(secondReader, 0);
BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes);
BytesReference value1 = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value1.streamInput().readString());
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes);
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, mappingKey, secondReader, termBytes);
assertEquals("bar", value2.streamInput().readString());
size = requestCacheStats.stats().getMemorySize();
IOUtils.close(reader, secondReader, writer, dir, cache);
@ -262,12 +347,12 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard);
Loader thirdLoader = new Loader(thirdReader, 0);
BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes);
BytesReference value1 = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value1.streamInput().readString());
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes);
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, mappingKey, secondReader, termBytes);
assertEquals("bar", value2.streamInput().readString());
logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes);
BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, mappingKey, thirdReader, termBytes);
assertEquals("baz", value3.streamInput().readString());
assertEquals(2, cache.count());
assertEquals(1, requestCacheStats.stats().getEvictions());
@ -285,6 +370,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
writer.addDocument(newDoc(0, "foo"));
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey();
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
BytesReference termBytes = XContentHelper.toXContent(termQuery, XContentType.JSON, false);
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
@ -293,31 +379,33 @@ public class IndicesRequestCacheTests extends ESTestCase {
writer.updateDocument(new Term("id", "0"), newDoc(0, "bar"));
DirectoryReader secondReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
MappingLookup.CacheKey secondMappingKey = MappingLookupUtils.fromTypes().cacheKey();
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
Loader secondLoader = new Loader(secondReader, 0);
writer.updateDocument(new Term("id", "0"), newDoc(0, "baz"));
DirectoryReader thirdReader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
MappingLookup.CacheKey thirdMappingKey = MappingLookupUtils.fromTypes().cacheKey();
AtomicBoolean differentIdentity = new AtomicBoolean(true);
TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity);
TestEntity thirdEntity = new TestEntity(requestCacheStats, differentIdentity);
Loader thirdLoader = new Loader(thirdReader, 0);
BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes);
BytesReference value1 = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value1.streamInput().readString());
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes);
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondMappingKey, secondReader, termBytes);
assertEquals("bar", value2.streamInput().readString());
logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes);
BytesReference value3 = cache.getOrCompute(thirdEntity, thirdLoader, thirdMappingKey, thirdReader, termBytes);
assertEquals("baz", value3.streamInput().readString());
assertEquals(3, cache.count());
final long hitCount = requestCacheStats.stats().getHitCount();
// clear all for the indexShard Idendity even though is't still open
// clear all for the indexShard entity even though is't still open
cache.clear(randomFrom(entity, secondEntity));
cache.cleanCache();
assertEquals(1, cache.count());
// third has not been validated since it's a different identity
value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes);
value3 = cache.getOrCompute(thirdEntity, thirdLoader, thirdMappingKey, thirdReader, termBytes);
assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount());
assertEquals("baz", value3.streamInput().readString());
@ -367,6 +455,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig());
writer.addDocument(newDoc(0, "foo"));
MappingLookup.CacheKey mappingKey = MappingLookupUtils.fromTypes().cacheKey();
DirectoryReader reader = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer),
new ShardId("foo", "bar", 1));
TermQueryBuilder termQuery = new TermQueryBuilder("id", "0");
@ -376,7 +465,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes);
BytesReference value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@ -387,28 +476,28 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache hit
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, reader, termBytes);
value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertTrue(loader.loadedFromCache);
assertEquals(1, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length());
assertEquals(1, cache.numRegisteredCloseListeners());
// load again after invalidate
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
cache.invalidate(entity, reader, termBytes);
value = cache.getOrCompute(entity, loader, reader, termBytes);
cache.invalidate(entity, mappingKey, reader, termBytes);
value = cache.getOrCompute(entity, loader, mappingKey, reader, termBytes);
assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertFalse(loader.loadedFromCache);
assertEquals(1, cache.count());
assertTrue(requestCacheStats.stats().getMemorySize().bytesAsInt() > value.length());
assertTrue(requestCacheStats.stats().getMemorySize().getBytes() > value.length());
assertEquals(1, cache.numRegisteredCloseListeners());
// release
@ -423,15 +512,17 @@ public class IndicesRequestCacheTests extends ESTestCase {
assertEquals(2, requestCacheStats.stats().getMissCount());
assertEquals(0, requestCacheStats.stats().getEvictions());
assertEquals(0, cache.count());
assertEquals(0, requestCacheStats.stats().getMemorySize().bytesAsInt());
assertEquals(0L, requestCacheStats.stats().getMemorySize().getBytes());
IOUtils.close(reader, writer, dir, cache);
assertEquals(0, cache.numRegisteredCloseListeners());
}
public void testEqualsKey() throws IOException {
public void testKeyEqualsAndHashCode() throws IOException {
AtomicBoolean trueBoolean = new AtomicBoolean(true);
AtomicBoolean falseBoolean = new AtomicBoolean(false);
MappingLookup.CacheKey mKey1 = MappingLookupUtils.fromTypes().cacheKey();
MappingLookup.CacheKey mKey2 = MappingLookupUtils.fromTypes().cacheKey();
Directory dir = newDirectory();
IndexWriterConfig config = newIndexWriterConfig();
IndexWriter writer = new IndexWriter(dir, config);
@ -441,19 +532,51 @@ public class IndicesRequestCacheTests extends ESTestCase {
IndexReader reader2 = DirectoryReader.open(writer);
IndexReader.CacheKey rKey2 = reader2.getReaderCacheHelper().getKey();
IOUtils.close(reader1, reader2, writer, dir);
IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1));
IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(1));
IndicesRequestCache.Key key3 = new IndicesRequestCache.Key(new TestEntity(null, falseBoolean), rKey1, new TestBytesReference(1));
IndicesRequestCache.Key key4 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey2, new TestBytesReference(1));
IndicesRequestCache.Key key5 = new IndicesRequestCache.Key(new TestEntity(null, trueBoolean), rKey1, new TestBytesReference(2));
String s = "Some other random object";
assertEquals(key1, key1);
assertEquals(key1, key2);
List<IndicesRequestCache.Key> keys = new ArrayList<>();
for (AtomicBoolean bool : new AtomicBoolean[] { trueBoolean, falseBoolean }) {
for (MappingLookup.CacheKey mKey : new MappingLookup.CacheKey[] { mKey1, mKey2 }) {
for (IndexReader.CacheKey rKey : new IndexReader.CacheKey[] { rKey1, rKey2 }) {
for (BytesReference requestKey : new BytesReference[] { new TestBytesReference(1), new TestBytesReference(2) }) {
keys.add(new IndicesRequestCache.Key(new TestEntity(null, bool), mKey, rKey, requestKey));
}
}
}
}
for (IndicesRequestCache.Key key : keys) {
assertNotEquals(key, null);
assertNotEquals(key, "Some other random object");
}
for (IndicesRequestCache.Key key1 : keys) {
assertNotEquals(key1, null);
assertNotEquals(key1, s);
assertNotEquals(key1, key3);
assertNotEquals(key1, key4);
assertNotEquals(key1, key5);
for (IndicesRequestCache.Key key2 : keys) {
if (key1 == key2) {
assertEquals(key1, key2);
assertEquals(key1.hashCode(), key2.hashCode());
} else {
assertNotEquals(key1, key2);
assertNotEquals(key1.hashCode(), key2.hashCode());
/*
* If we made random keys it'd be possible for us to have
* hash collisions and for the assertion above to fail.
* But we don't use random keys for this test.
*/
}
}
}
IndicesRequestCache.Key key1 = new IndicesRequestCache.Key(
new TestEntity(null, trueBoolean),
mKey1,
rKey1,
new TestBytesReference(1)
);
IndicesRequestCache.Key key2 = new IndicesRequestCache.Key(
new TestEntity(null, trueBoolean),
mKey1,
rKey1,
new TestBytesReference(1)
);
assertEquals(key1, key2);
assertEquals(key1.hashCode(), key2.hashCode());
}
private class TestBytesReference extends AbstractBytesReference {

View file

@ -33,6 +33,7 @@ import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.indices.IndicesRequestCache.Key;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
@ -287,7 +288,8 @@ public class IndicesServiceCloseTests extends ESTestCase {
@Override
public void onRemoval(RemovalNotification<Key, BytesReference> notification) {}
};
cache.getOrCompute(cacheEntity, () -> new BytesArray("bar"), searcher.getDirectoryReader(), new BytesArray("foo"));
MappingLookup.CacheKey mappingCacheKey = indexService.mapperService().mappingLookup().cacheKey();
cache.getOrCompute(cacheEntity, () -> new BytesArray("bar"), mappingCacheKey, searcher.getDirectoryReader(), new BytesArray("foo"));
assertEquals(1L, cache.count());
searcher.close();

View file

@ -37,15 +37,18 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NestedPathFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
@ -878,4 +881,20 @@ public class NestedAggregatorTests extends AggregatorTestCase {
return documents;
}
@Override
protected List<ObjectMapper> objectMappers() {
return MOCK_OBJECT_MAPPERS;
}
static final List<ObjectMapper> MOCK_OBJECT_MAPPERS = List.of(
nestedObject(NESTED_OBJECT),
nestedObject(NESTED_OBJECT + "." + NESTED_OBJECT2),
nestedObject("nested_reseller"),
nestedObject("nested_chapters"),
nestedObject("nested_field")
);
public static ObjectMapper nestedObject(String path) {
return new ObjectMapper.Builder(path, Version.CURRENT).nested(ObjectMapper.Nested.newNested()).build(new ContentPath());
}
}

View file

@ -31,6 +31,7 @@ import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NestedPathFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.aggregations.AggregationBuilder;
@ -250,4 +251,8 @@ public class ReverseNestedAggregatorTests extends AggregatorTestCase {
}, NestedAggregatorTests.resellersMappedFields());
}
@Override
protected List<ObjectMapper> objectMappers() {
return NestedAggregatorTests.MOCK_OBJECT_MAPPERS;
}
}

View file

@ -46,6 +46,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NestedPathFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
@ -64,6 +65,7 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests;
import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
import org.elasticsearch.search.aggregations.metrics.Max;
import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
@ -601,4 +603,10 @@ public class RareTermsAggregatorTests extends AggregatorTestCase {
* buckets we should have left after each reduction.
*/
}
@Override
protected List<ObjectMapper> objectMappers() {
return List.of(NestedAggregatorTests.nestedObject("nested_object"));
}
}

View file

@ -35,7 +35,9 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.mapper.BinaryFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MockFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import org.elasticsearch.search.aggregations.AggregationBuilder;
@ -49,6 +51,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sampler;
import static org.elasticsearch.search.aggregations.AggregationBuilders.significantText;
@ -346,4 +349,9 @@ public class SignificantTextAggregatorTests extends AggregatorTestCase {
}
}
}
@Override
protected FieldMapper buildMockFieldMapper(MappedFieldType ft) {
return new MockFieldMapper(ft, Map.of(ft.name(), ft.getTextSearchInfo().getSearchAnalyzer()));
}
}

View file

@ -53,6 +53,7 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NestedPathFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
@ -84,6 +85,7 @@ import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuil
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorTests;
import org.elasticsearch.search.aggregations.metrics.InternalTopHits;
import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder;
@ -1500,4 +1502,8 @@ public class TermsAggregatorTests extends AggregatorTestCase {
return result;
}
@Override
protected List<ObjectMapper> objectMappers() {
return List.of(NestedAggregatorTests.nestedObject("nested_object"));
}
}

View file

@ -737,7 +737,27 @@ public class FieldFetcherTests extends MapperServiceTestCase {
.put(IndexMetadata.SETTING_INDEX_UUID, "uuid").build();
IndexMetadata indexMetadata = new IndexMetadata.Builder("index").settings(settings).build();
IndexSettings indexSettings = new IndexSettings(indexMetadata, settings);
return new QueryShardContext(0, 0, indexSettings, null, null, null, mapperService, null, null, null, null, null, null, null, null,
null, null, null, emptyMap());
return new QueryShardContext(
0,
0,
indexSettings,
null,
null,
null,
mapperService,
mapperService.mappingLookup(),
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
null,
emptyMap()
);
}
}

View file

@ -279,7 +279,7 @@ public class HighlightBuilderTests extends ESTestCase {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(index, indexSettings);
// shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter
QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE,
null, null, null, null, null, xContentRegistry(), namedWriteableRegistry,
null, null, null, null, null, null, xContentRegistry(), namedWriteableRegistry,
null, null, System::currentTimeMillis, null, null, () -> true, null, emptyMap()) {
@Override
public MappedFieldType getFieldType(String name) {

View file

@ -143,7 +143,7 @@ public class QueryRescorerBuilderTests extends ESTestCase {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings);
// shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer
QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE,
null, null, null, null, null,
null, null, null, null, null, null,
xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()) {
@Override
public MappedFieldType getFieldType(String name) {
@ -187,7 +187,7 @@ public class QueryRescorerBuilderTests extends ESTestCase {
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAlphaOfLengthBetween(1, 10), indexSettings);
// shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer
QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE,
null, null, null, null, null,
null, null, null, null, null, null,
xContentRegistry(), namedWriteableRegistry, null, null, () -> nowInMillis, null, null, () -> true, null, emptyMap()) {
@Override
public MappedFieldType getFieldType(String name) {

View file

@ -202,7 +202,7 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
return builder.build(new IndexFieldDataCache.None(), null);
};
return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup,
null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, searcher,
null, null, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, searcher,
() -> randomNonNegativeLong(), null, null, () -> true, null, emptyMap()) {
@Override

View file

@ -39,6 +39,8 @@ import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MappingLookupUtils;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.QueryShardContext;
@ -163,10 +165,8 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index(randomAlphaOfLengthBetween(1, 10), "_na_"),
indexSettings);
MapperService mapperService = mock(MapperService.class);
ScriptService scriptService = mock(ScriptService.class);
MappedFieldType fieldType = mockFieldType(suggestionBuilder.field());
when(mapperService.fieldType(any(String.class))).thenReturn(fieldType);
IndexAnalyzers indexAnalyzers = new IndexAnalyzers(
new HashMap<>() {
@Override
@ -176,11 +176,13 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
},
Collections.emptyMap(),
Collections.emptyMap());
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexAnalyzers()).thenReturn(indexAnalyzers);
MappingLookup lookup = MappingLookupUtils.fromTypes(fieldType);
when(scriptService.compile(any(Script.class), any())).then(invocation -> new TestTemplateService.MockTemplateScript.Factory(
((Script) invocation.getArguments()[0]).getIdOrCode()));
QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null,
null, mapperService, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null,
null, mapperService, lookup, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null,
System::currentTimeMillis, null, null, () -> true, null, emptyMap());
SuggestionContext suggestionContext = suggestionBuilder.build(mockShardContext);
@ -214,13 +216,9 @@ public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBui
Settings indexSettings = builder.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(new Index(randomAlphaOfLengthBetween(1, 10), "_na_"),
indexSettings);
MapperService mapperService = mock(MapperService.class);
ScriptService scriptService = mock(ScriptService.class);
when(mapperService.getNamedAnalyzer(any(String.class))).then(
invocation -> new NamedAnalyzer((String) invocation.getArguments()[0], AnalyzerScope.INDEX, new SimpleAnalyzer()));
QueryShardContext mockShardContext = new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, null,
null, mapperService, null, scriptService, xContentRegistry(), namedWriteableRegistry, null, null,
null, mock(MapperService.class), MappingLookup.EMPTY, null, null, xContentRegistry(), namedWriteableRegistry, null, null,
System::currentTimeMillis, null, null, () -> true, null, emptyMap());
if (randomBoolean()) {
mockShardContext.setAllowUnmappedFields(randomBoolean());

View file

@ -0,0 +1,36 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import java.util.Arrays;
import java.util.List;
import static java.util.stream.Collectors.toList;
public class MappingLookupUtils {
public static MappingLookup fromTypes(MappedFieldType... types) {
return fromTypes(Arrays.asList(types), List.of());
}
public static MappingLookup fromTypes(List<MappedFieldType> concreteFields, List<RuntimeFieldType> runtimeFields) {
List<FieldMapper> mappers = concreteFields.stream().map(MockFieldMapper::new).collect(toList());
return new MappingLookup("_doc", mappers, List.of(), List.of(), runtimeFields, 0, souceToParse -> null, true);
}
}

View file

@ -19,10 +19,12 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.query.QueryShardContext;
import java.util.Collections;
import java.util.List;
import java.util.Map;
// this sucks how much must be overridden just do get a dummy field mapper...
public class MockFieldMapper extends FieldMapper {
@ -32,7 +34,11 @@ public class MockFieldMapper extends FieldMapper {
}
public MockFieldMapper(MappedFieldType fieldType) {
super(findSimpleName(fieldType.name()), fieldType,
this(fieldType, Map.of());
}
public MockFieldMapper(MappedFieldType fieldType, Map<String, NamedAnalyzer> indexAnalyzers) {
super(findSimpleName(fieldType.name()), fieldType, indexAnalyzers,
MultiFields.empty(), new CopyTo.Builder().build());
}

View file

@ -44,6 +44,7 @@ import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
@ -55,7 +56,6 @@ import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
@ -71,7 +71,6 @@ import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache.Listener;
import org.elasticsearch.index.cache.query.DisabledQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
@ -87,9 +86,10 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MockFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ObjectMapper.Nested;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.TextFieldMapper;
@ -142,11 +142,10 @@ import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@ -156,7 +155,6 @@ import static org.mockito.Mockito.when;
* {@link AggregationBuilder} instance.
*/
public abstract class AggregatorTestCase extends ESTestCase {
private static final String NESTEDFIELD_PREFIX = "nested_";
private List<Aggregator> releasables = new ArrayList<>();
protected ValuesSourceRegistry valuesSourceRegistry;
@ -227,38 +225,41 @@ public abstract class AggregatorTestCase extends ESTestCase {
*/
BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), breakerService).withCircuitBreaking();
// TODO: now just needed for top_hits, this will need to be revised for other agg unit tests:
MapperService mapperService = mock(MapperService.class);
when(mapperService.getIndexSettings()).thenReturn(indexSettings);
when(mapperService.hasNested()).thenReturn(false);
when(mapperService.indexAnalyzer(anyString(), any())).thenReturn(Lucene.STANDARD_ANALYZER); // for significant text
for (MappedFieldType type : fieldTypes) {
String name = type.name();
when(mapperService.fieldType(name)).thenReturn(type);
// Alias each field to <name>-alias so everyone can test aliases
when(mapperService.fieldType(name + "-alias")).thenReturn(type);
}
when(mapperService.getObjectMapper(anyString())).thenAnswer(invocation -> {
String fieldName = (String) invocation.getArguments()[0];
if (fieldName.startsWith(NESTEDFIELD_PREFIX)) {
return new ObjectMapper.Builder(fieldName, Version.CURRENT).nested(Nested.newNested()).build(new ContentPath());
}
return null;
});
MappingLookup mappingLookup = new MappingLookup(
"_doc",
Arrays.stream(fieldTypes).map(this::buildMockFieldMapper).collect(toList()),
objectMappers(),
// Alias all fields to <name>-alias to test aliases
Arrays.stream(fieldTypes)
.map(ft -> new FieldAliasMapper(ft.name() + "-alias", ft.name() + "-alias", ft.name()))
.collect(toList()),
List.of(),
0,
sourceToParse -> null,
true
);
TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataBuilder = (
fieldType,
s,
searchLookup) -> fieldType.fielddataBuilder(mapperService.getIndexSettings().getIndex().getName(), searchLookup)
searchLookup) -> fieldType.fielddataBuilder(indexSettings.getIndex().getName(), searchLookup)
.build(new IndexFieldDataCache.None(), breakerService);
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, new BitsetFilterCache.Listener() {
@Override
public void onRemoval(ShardId shardId, Accountable accountable) {}
@Override
public void onCache(ShardId shardId, Accountable accountable) {}
});
QueryShardContext queryShardContext = new QueryShardContext(
0,
-1,
indexSettings,
bigArrays,
null,
bitsetFilterCache,
fieldDataBuilder,
mapperService,
null,
mappingLookup,
null,
getMockScriptService(),
xContentRegistry(),
@ -274,13 +275,12 @@ public abstract class AggregatorTestCase extends ESTestCase {
);
MultiBucketConsumer consumer = new MultiBucketConsumer(maxBucket, breakerService.getBreaker(CircuitBreaker.REQUEST));
BitsetFilterCache bitsetFilterCache = new BitsetFilterCache(indexSettings, mock(Listener.class));
return new ProductionAggregationContext(
queryShardContext,
query,
null,
consumer,
() -> buildSubSearchContext(mapperService, queryShardContext, bitsetFilterCache),
() -> buildSubSearchContext(indexSettings, queryShardContext, bitsetFilterCache),
releasables::add,
bitsetFilterCache,
randomInt(),
@ -289,16 +289,32 @@ public abstract class AggregatorTestCase extends ESTestCase {
);
}
/**
* Build a {@link FieldMapper} to create the {@link MappingLookup} used for the aggs.
* {@code protected} so subclasses can have it.
*/
protected FieldMapper buildMockFieldMapper(MappedFieldType ft) {
return new MockFieldMapper(ft);
}
/**
* {@link ObjectMapper}s to add to the lookup. By default we don't need
* any {@link ObjectMapper}s but testing nested objects will require adding some.
*/
protected List<ObjectMapper> objectMappers() {
return List.of();
}
/**
* Build a {@link SubSearchContext}s to power {@code top_hits}.
*/
private SubSearchContext buildSubSearchContext(
MapperService mapperService,
IndexSettings indexSettings,
QueryShardContext queryShardContext,
BitsetFilterCache bitsetFilterCache
) {
SearchContext ctx = mock(SearchContext.class);
QueryCache queryCache = new DisabledQueryCache(mapperService.getIndexSettings());
QueryCache queryCache = new DisabledQueryCache(indexSettings);
QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() {
@Override
public void onUse(Query query) {
@ -325,11 +341,13 @@ public abstract class AggregatorTestCase extends ESTestCase {
}
when(ctx.fetchPhase()).thenReturn(new FetchPhase(Arrays.asList(new FetchSourcePhase(), new FetchDocValuesPhase())));
when(ctx.getQueryShardContext()).thenReturn(queryShardContext);
NestedDocuments nestedDocuments = new NestedDocuments(mapperService, bitsetFilterCache::getBitSetProducer);
when(ctx.getNestedDocuments()).thenReturn(nestedDocuments);
IndexShard indexShard = mock(IndexShard.class);
when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0));
when(ctx.indexShard()).thenReturn(indexShard);
MapperService mapperService = mock(MapperService.class);
when(mapperService.hasNested()).thenReturn(false);
NestedDocuments nested = new NestedDocuments(mapperService, bitsetFilterCache::getBitSetProducer);
when(ctx.getNestedDocuments()).thenReturn(nested);
return new SubSearchContext(ctx);
}
@ -610,7 +628,6 @@ public abstract class AggregatorTestCase extends ESTestCase {
*/
public void testSupportedFieldTypes() throws IOException {
MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
String fieldName = "typeTestFieldName";
List<ValuesSourceType> supportedVSTypes = getSupportedValuesSourceTypes();
List<String> unsupportedMappedFieldTypes = unsupportedMappedFieldTypes();

View file

@ -411,9 +411,28 @@ public abstract class AbstractBuilderTestCase extends ESTestCase {
}
QueryShardContext createShardContext(IndexSearcher searcher) {
return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache,
indexFieldDataService::getForField, mapperService, similarityService, scriptService, xContentRegistry,
namedWriteableRegistry, this.client, searcher, () -> nowInMillis, null, indexNameMatcher(), () -> true, null, emptyMap());
return new QueryShardContext(
0,
0,
idxSettings,
BigArrays.NON_RECYCLING_INSTANCE,
bitsetFilterCache,
indexFieldDataService::getForField,
mapperService,
mapperService.mappingLookup(),
similarityService,
scriptService,
xContentRegistry,
namedWriteableRegistry,
this.client,
searcher,
() -> nowInMillis,
null,
indexNameMatcher(),
() -> true,
null,
emptyMap()
);
}
ScriptModule createScriptModule(List<ScriptPlugin> scriptPlugins) {

View file

@ -33,7 +33,9 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MappingLookupUtils;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.TermQueryBuilder;
@ -74,7 +76,6 @@ import static org.mockito.Mockito.when;
public class DocumentSubsetBitsetCacheTests extends ESTestCase {
private static final String MISSING_FIELD_NAME = "does-not-exist";
private static final int FIELD_COUNT = 10;
private ExecutorService singleThreadExecutor;
@ -106,7 +107,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase {
public void testNullBitSetIsReturnedForNonMatchingQuery() throws Exception {
final DocumentSubsetBitsetCache cache = newCache(Settings.EMPTY);
runTestOnIndex((shardContext, leafContext) -> {
final Query query = QueryBuilders.termQuery(MISSING_FIELD_NAME, "any-value").rewrite(shardContext).toQuery(shardContext);
final Query query = QueryBuilders.termQuery("not-mapped", "any-value").rewrite(shardContext).toQuery(shardContext);
final BitSet bitSet = cache.getBitSet(query, leafContext);
assertThat(bitSet, nullValue());
});
@ -536,7 +537,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase {
}
}
private TestIndexContext testIndex(MapperService mapperService, Client client) throws IOException {
private TestIndexContext testIndex(MappingLookup mappingLookup, Client client) throws IOException {
TestIndexContext context = null;
final long nowInMillis = randomNonNegativeLong();
@ -564,7 +565,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase {
final LeafReaderContext leaf = directoryReader.leaves().get(0);
final QueryShardContext shardContext = new QueryShardContext(shardId.id(), 0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE,
null, null, mapperService, null, null, xContentRegistry(), writableRegistry(),
null, null, null, mappingLookup, null, null, xContentRegistry(), writableRegistry(),
client, new IndexSearcher(directoryReader), () -> nowInMillis, null, null, () -> true, null, emptyMap());
context = new TestIndexContext(directory, iw, directoryReader, shardContext, leaf);
@ -585,15 +586,14 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase {
}
private void runTestOnIndices(int numberIndices, CheckedConsumer<List<TestIndexContext>, Exception> body) throws Exception {
final MapperService mapperService = mock(MapperService.class);
when(mapperService.fieldType(Mockito.anyString())).thenAnswer(invocation -> {
final String fieldName = (String) invocation.getArguments()[0];
if (fieldName.equals(MISSING_FIELD_NAME)) {
return null;
} else {
return new KeywordFieldMapper.KeywordFieldType(fieldName);
List<MappedFieldType> types = new ArrayList<>();
for (int i = 0; i < 11; i++) { // the tests use fields 1 to 10.
// This field has a value.
types.add(new KeywordFieldMapper.KeywordFieldType("field-" + i));
// This field never has a value
types.add(new KeywordFieldMapper.KeywordFieldType("dne-" + i));
}
});
MappingLookup mappingLookup = MappingLookupUtils.fromTypes(types, List.of());
final Client client = mock(Client.class);
when(client.settings()).thenReturn(Settings.EMPTY);
@ -601,7 +601,7 @@ public class DocumentSubsetBitsetCacheTests extends ESTestCase {
final List<TestIndexContext> context = new ArrayList<>(numberIndices);
try {
for (int i = 0; i < numberIndices; i++) {
context.add(testIndex(mapperService, client));
context.add(testIndex(mappingLookup, client));
}
body.accept(context);

View file

@ -29,15 +29,15 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
import org.elasticsearch.index.mapper.MappingLookup;
import org.elasticsearch.index.mapper.MappingLookupUtils;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.TermsQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.license.XPackLicenseState.Feature;
import org.elasticsearch.mock.orig.Mockito;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.test.AbstractBuilderTestCase;
@ -49,7 +49,6 @@ import org.elasticsearch.xpack.core.security.authz.permission.DocumentPermission
import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissions;
import org.elasticsearch.xpack.core.security.user.User;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Executors;
@ -59,7 +58,6 @@ import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
@ -68,15 +66,8 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
public void testDLS() throws Exception {
ShardId shardId = new ShardId("_index", "_na_", 0);
MapperService mapperService = mock(MapperService.class);
MappingLookup mappingLookup = MappingLookupUtils.fromTypes(new KeywordFieldType("field"));
ScriptService scriptService = mock(ScriptService.class);
when(mapperService.documentMapper()).thenReturn(null);
when(mapperService.simpleMatchToFullName(anyString()))
.then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0]));
when(mapperService.fieldType(Mockito.anyString())).then(invocation -> {
final String fieldName = (String) invocation.getArguments()[0];
return new KeywordFieldMapper.KeywordFieldType(fieldName);
});
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext);
@ -91,7 +82,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
when(client.settings()).thenReturn(Settings.EMPTY);
final long nowInMillis = randomNonNegativeLong();
QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), 0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE,
null, null, mapperService, null, null, xContentRegistry(), writableRegistry(),
null, null, null, mappingLookup, null, null, xContentRegistry(), writableRegistry(),
client, null, () -> nowInMillis, null, null, () -> true, null, emptyMap());
QueryShardContext queryShardContext = spy(realQueryShardContext);
DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor());
@ -182,15 +173,12 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
public void testDLSWithLimitedPermissions() throws Exception {
ShardId shardId = new ShardId("_index", "_na_", 0);
MapperService mapperService = mock(MapperService.class);
MappingLookup mappingLookup = MappingLookupUtils.fromTypes(
new KeywordFieldType("field"),
new KeywordFieldType("f1"),
new KeywordFieldType("f2")
);
ScriptService scriptService = mock(ScriptService.class);
when(mapperService.documentMapper()).thenReturn(null);
when(mapperService.simpleMatchToFullName(anyString()))
.then(invocationOnMock -> Collections.singletonList((String) invocationOnMock.getArguments()[0]));
when(mapperService.fieldType(Mockito.anyString())).then(invocation -> {
final String fieldName = (String) invocation.getArguments()[0];
return new KeywordFieldMapper.KeywordFieldType(fieldName);
});
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
final SecurityContext securityContext = new SecurityContext(Settings.EMPTY, threadContext);
@ -223,7 +211,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT
when(client.settings()).thenReturn(Settings.EMPTY);
final long nowInMillis = randomNonNegativeLong();
QueryShardContext realQueryShardContext = new QueryShardContext(shardId.id(), 0, indexSettings, BigArrays.NON_RECYCLING_INSTANCE,
null, null, mapperService, null, null, xContentRegistry(), writableRegistry(),
null, null, null, mappingLookup, null, null, xContentRegistry(), writableRegistry(),
client, null, () -> nowInMillis, null, null, () -> true, null, emptyMap());
QueryShardContext queryShardContext = spy(realQueryShardContext);
DocumentSubsetBitsetCache bitsetCache = new DocumentSubsetBitsetCache(Settings.EMPTY, Executors.newSingleThreadExecutor());

View file

@ -39,7 +39,7 @@ public class FlattenedFieldLookupTests extends ESTestCase {
String fieldName = "object1.object2.field";
FlattenedFieldMapper mapper = createFlattenedMapper(fieldName);
FieldTypeLookup lookup = new FieldTypeLookup(singletonList(mapper), emptyList(), emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", singletonList(mapper), emptyList(), emptyList());
assertEquals(mapper.fieldType(), lookup.get(fieldName));
String objectKey = "key1.key2";
@ -60,7 +60,7 @@ public class FlattenedFieldLookupTests extends ESTestCase {
String aliasName = "alias";
FieldAliasMapper alias = new FieldAliasMapper(aliasName, aliasName, fieldName);
FieldTypeLookup lookup = new FieldTypeLookup(singletonList(mapper), singletonList(alias), emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", singletonList(mapper), singletonList(alias), emptyList());
assertEquals(mapper.fieldType(), lookup.get(aliasName));
String objectKey = "key1.key2";
@ -83,11 +83,11 @@ public class FlattenedFieldLookupTests extends ESTestCase {
FlattenedFieldMapper mapper2 = createFlattenedMapper(field2);
FlattenedFieldMapper mapper3 = createFlattenedMapper(field3);
FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(mapper1, mapper2), emptyList(), emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", Arrays.asList(mapper1, mapper2), emptyList(), emptyList());
assertNotNull(lookup.get(field1 + ".some.key"));
assertNotNull(lookup.get(field2 + ".some.key"));
lookup = new FieldTypeLookup(Arrays.asList(mapper1, mapper2, mapper3), emptyList(), emptyList());
lookup = new FieldTypeLookup("_doc", Arrays.asList(mapper1, mapper2, mapper3), emptyList(), emptyList());
assertNotNull(lookup.get(field1 + ".some.key"));
assertNotNull(lookup.get(field2 + ".some.key"));
assertNotNull(lookup.get(field3 + ".some.key"));
@ -124,7 +124,7 @@ public class FlattenedFieldLookupTests extends ESTestCase {
MockFieldMapper mapper = new MockFieldMapper("foo");
FlattenedFieldMapper flattenedMapper = createFlattenedMapper("object1.object2.field");
FieldTypeLookup lookup = new FieldTypeLookup(Arrays.asList(mapper, flattenedMapper), emptyList(), emptyList());
FieldTypeLookup lookup = new FieldTypeLookup("_doc", Arrays.asList(mapper, flattenedMapper), emptyList(), emptyList());
Set<String> fieldNames = new HashSet<>();
lookup.filter(ft -> true).forEach(ft -> fieldNames.add(ft.name()));

View file

@ -89,7 +89,7 @@ public class RollupIndexerIndexingTests extends AggregatorTestCase {
private void setup() {
settings = createIndexSettings();
queryShardContext = new QueryShardContext(0, 0, settings,
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null,
BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, null,
null, null, null, null, () -> 0L, null, null, () -> true, null, emptyMap());
}

View file

@ -128,6 +128,7 @@ setup:
search:
index: sensor
body:
size: 0
aggs:
dow:
terms:
@ -138,6 +139,31 @@ setup:
- match: {aggregations.dow.buckets.1.key: Monday}
- match: {aggregations.dow.buckets.1.doc_count: 1}
# Update the mapping and make sure the cache doesn't still have the old results
- do:
indices.put_mapping:
index: sensor
body:
runtime:
day_of_week:
type: keyword
script: |
emit(doc['timestamp'].value.dayOfWeekEnum.getDisplayName(TextStyle.SHORT, Locale.ROOT));
- do:
search:
index: sensor
body:
size: 0
aggs:
dow:
terms:
field: day_of_week
- match: {hits.total.value: 6}
- match: {aggregations.dow.buckets.0.key: Fri}
- match: {aggregations.dow.buckets.0.doc_count: 1}
- match: {aggregations.dow.buckets.1.key: Mon}
- match: {aggregations.dow.buckets.1.doc_count: 1}
---
"term query":
- do:

View file

@ -898,7 +898,7 @@ public class WildcardFieldMapperTests extends MapperTestCase {
return builder.build(new IndexFieldDataCache.None(), null);
};
return new QueryShardContext(0, 0, idxSettings, BigArrays.NON_RECYCLING_INSTANCE, bitsetFilterCache, indexFieldDataLookup,
null, null, null, xContentRegistry(), null, null, null,
null, null, null, null, xContentRegistry(), null, null, null,
() -> randomNonNegativeLong(), null, null, () -> true, null, emptyMap()) {
@Override