diff --git a/.buildkite/pipelines/lucene-snapshot/run-tests.yml b/.buildkite/pipelines/lucene-snapshot/run-tests.yml index c76c54a56494..f7293e051467 100644 --- a/.buildkite/pipelines/lucene-snapshot/run-tests.yml +++ b/.buildkite/pipelines/lucene-snapshot/run-tests.yml @@ -56,7 +56,6 @@ steps: matrix: setup: BWC_VERSION: - - 7.17.13 - 8.9.1 - 8.10.0 agents: diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java index 569e8909e1e1..b294fe97c7e7 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/vector/VectorScorerBenchmark.java @@ -19,7 +19,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.common.logging.LogConfigurator; import org.elasticsearch.core.IOUtils; @@ -217,19 +217,17 @@ public class VectorScorerBenchmark { return 1 / (1f + adjustedDistance); } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } - RandomVectorScorer luceneScorer(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) - throws IOException { + RandomVectorScorer luceneScorer(QuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorer(sim, values, queryVec); } diff --git a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt index 58ccf69406ff..5388f942be8d 100644 --- a/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt +++ b/build-tools-internal/src/main/resources/forbidden/es-server-signatures.txt @@ -59,10 +59,6 @@ org.apache.lucene.util.Version#parseLeniently(java.lang.String) org.apache.lucene.index.NoMergePolicy#INSTANCE @ explicit use of NoMergePolicy risks forgetting to configure NoMergeScheduler; use org.elasticsearch.common.lucene.Lucene#indexWriterConfigWithNoMerging() instead. 
-@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#relativeTimeInMillis instead -org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread() -org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter() - @defaultMessage Don't interrupt threads use FutureUtils#cancel(Future) instead java.util.concurrent.Future#cancel(boolean) diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index 169c187ef115..6bc3c2ad4d25 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 9.0.0 -lucene = 9.12.0 +lucene = 10.0.0 bundled_jdk_vendor = openjdk bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4 diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index a523c3ec85ba..f55d90933ed6 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -62,6 +62,9 @@ 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.setAsTypeCache 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.asTypeUncached +# Lucene 10: apply MADV_NORMAL advice to enable more aggressive readahead +-Dorg.apache.lucene.store.defaultReadAdvice=normal + ## heap dumps # generate a heap dump when an allocation from the Java heap fails; heap dumps diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index b65b974cd6b6..bdb0704fcd88 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.12.0 -:lucene_version_path: 9_12_0 +:lucene_version: 10.0.0 +:lucene_version_path: 10_0_0 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/changelog/113482.yaml b/docs/changelog/113482.yaml new file mode 100644 index 000000000000..cb5823f0ccfc --- /dev/null +++ b/docs/changelog/113482.yaml @@ -0,0 +1,27 @@ +pr: 113482 +summary: The 'persian' analyzer has a stemmer by default +area: Analysis +type: breaking +issues: +- 113050 +breaking: + title: The 'persian' analyzer has a stemmer by default + area: Analysis + details: >- + Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch + exposes as the 'persian' analyzer. Existing indices will keep the old + non-stemming behaviour while new indices will see the updated behaviour with + added stemming. + Users that wish to maintain the non-stemming behaviour need to define their + own analyzer as outlined in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. + Users that wish to use the new stemming behaviour for existing indices will + have to reindex their data. + impact: >- + Indexing with the 'persian' analyzer will produce slightly different tokens. + Users should check if this impacts their search results. If they wish to + maintain the legacy non-stemming behaviour, they can define their own + analyzer equivalent as explained in + https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer. 
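For readers wondering what "defining their own analyzer equivalent" amounts to, the non-stemming chain that this change keeps re-creating for pre-upgrade indices (see the PersianAnalyzerProvider change further down in this diff) can be sketched in plain Lucene roughly as follows. This is only an illustrative sketch; the class name LegacyPersianAnalyzerSketch is not part of this change, and the production route is the index-settings configuration linked above.

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.StopFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
import org.apache.lucene.analysis.core.DecimalDigitFilter;
import org.apache.lucene.analysis.fa.PersianAnalyzer;
import org.apache.lucene.analysis.fa.PersianCharFilter;
import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
import org.apache.lucene.analysis.standard.StandardTokenizer;

import java.io.Reader;

// Rough sketch of the legacy (pre-Lucene 10) 'persian' chain: tokenize, lowercase,
// normalize digits and Arabic/Persian character forms, then drop stopwords; there is no stemming step.
public class LegacyPersianAnalyzerSketch extends Analyzer {

    @Override
    protected Reader initReader(String fieldName, Reader reader) {
        // the old analyzer also pre-processed the input with the Persian char filter
        return new PersianCharFilter(reader);
    }

    @Override
    protected TokenStreamComponents createComponents(String fieldName) {
        Tokenizer source = new StandardTokenizer();
        TokenStream result = new LowerCaseFilter(source);
        result = new DecimalDigitFilter(result);
        result = new ArabicNormalizationFilter(result);
        result = new PersianNormalizationFilter(result);
        // stopwords are removed last so that the stopword list matches the normalized tokens
        return new TokenStreamComponents(source, new StopFilter(result, PersianAnalyzer.getDefaultStopSet()));
    }
}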
+ notable: false + diff --git a/docs/changelog/113614.yaml b/docs/changelog/113614.yaml new file mode 100644 index 000000000000..bd9dcb3e3877 --- /dev/null +++ b/docs/changelog/113614.yaml @@ -0,0 +1,18 @@ +pr: 113614 +summary: The 'german2' stemmer is now an alias for the 'german' snowball stemmer +area: Analysis +type: breaking +issues: [] +breaking: + title: The "german2" snowball stemmer is now an alias for the "german" stemmer + area: Analysis + details: >- + Lucene 10 has merged the improved "german2" snowball language stemmer with the + "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for + "german". This may result in slightly different tokens being generated for + terms with umlaut substitution (like "ue" for "ü", etc.) + impact: >- + Replace usages of "german2" with "german" in analysis configuration. Old + indices that use the "german" stemmer should be reindexed if possible. + notable: false + diff --git a/docs/changelog/114124.yaml b/docs/changelog/114124.yaml new file mode 100644 index 000000000000..c812c6a46890 --- /dev/null +++ b/docs/changelog/114124.yaml @@ -0,0 +1,18 @@ +pr: 114124 +summary: The Korean dictionary for Nori has been updated +area: Analysis +type: breaking +issues: [] +breaking: + title: The Korean dictionary for Nori has been updated + area: Analysis + details: >- + Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1). + For details see https://github.com/apache/lucene/issues/11452. Users + experiencing changes in search behaviour on existing data are advised to + reindex. + impact: >- + The change is small and should generally provide better analysis results. + Existing indices for full-text use cases should be reindexed though. + notable: false + diff --git a/docs/changelog/114146.yaml b/docs/changelog/114146.yaml new file mode 100644 index 000000000000..be2096a64105 --- /dev/null +++ b/docs/changelog/114146.yaml @@ -0,0 +1,20 @@ +pr: 114146 +summary: Snowball stemmers have been upgraded +area: Analysis +type: breaking +issues: [] +breaking: + title: Snowball stemmers have been upgraded + area: Analysis + details: >- + Lucene 10 ships with an upgrade of its Snowball stemmers. + For details see https://github.com/apache/lucene/issues/13209. Users of + Snowball stemmers who are experiencing changes in search behaviour on + existing data are advised to reindex. + impact: >- + The upgrade should generally provide improved stemming results. Small changes + in token analysis can lead to mismatches with previously indexed data, so + existing indices using Snowball stemmers as part of their analysis chain + should be reindexed. 
+ notable: false + diff --git a/docs/changelog/114741.yaml b/docs/changelog/114741.yaml new file mode 100644 index 000000000000..ae45c183cddf --- /dev/null +++ b/docs/changelog/114741.yaml @@ -0,0 +1,5 @@ +pr: 114741 +summary: Upgrade to Lucene 10 +area: Search +type: upgrade +issues: [] diff --git a/docs/plugins/analysis-nori.asciidoc b/docs/plugins/analysis-nori.asciidoc index 02980a4ed8a8..0d3e76f71d23 100644 --- a/docs/plugins/analysis-nori.asciidoc +++ b/docs/plugins/analysis-nori.asciidoc @@ -244,11 +244,11 @@ Which responds with: "end_offset": 3, "type": "word", "position": 1, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JKS(Subject case marker)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JKS(Subject case marker)" }, { "token": "깊", @@ -268,11 +268,11 @@ Which responds with: "end_offset": 6, "type": "word", "position": 3, - "leftPOS": "E(Verbal endings)", + "leftPOS": "ETM(Adnominal form transformative ending)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "E(Verbal endings)" + "rightPOS": "ETM(Adnominal form transformative ending)" }, { "token": "나무", @@ -292,11 +292,11 @@ Which responds with: "end_offset": 10, "type": "word", "position": 5, - "leftPOS": "J(Ending Particle)", + "leftPOS": "JX(Auxiliary postpositional particle)", "morphemes": null, "posType": "MORPHEME", "reading": null, - "rightPOS": "J(Ending Particle)" + "rightPOS": "JX(Auxiliary postpositional particle)" } ] }, diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc index 5273537389e3..881970787f5a 100644 --- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc @@ -1430,7 +1430,8 @@ PUT /persian_example "decimal_digit", "arabic_normalization", "persian_normalization", - "persian_stop" + "persian_stop", + "persian_stem" ] } } diff --git a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc index 4cd088935af1..d9e2120afe6d 100644 --- a/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/stemmer-tokenfilter.asciidoc @@ -173,7 +173,6 @@ http://bvg.udc.es/recursos_lingua/stemming.jsp[`minimal_galician`] (Plural step German:: https://dl.acm.org/citation.cfm?id=1141523[*`light_german`*], https://snowballstem.org/algorithms/german/stemmer.html[`german`], -https://snowballstem.org/algorithms/german2/stemmer.html[`german2`], http://members.unine.ch/jacques.savoy/clef/morpho.pdf[`minimal_german`] Greek:: diff --git a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc index 2cf01b77d57a..5f9880738728 100644 --- a/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/pathhierarchy-tokenizer.asciidoc @@ -40,14 +40,14 @@ POST _analyze "start_offset": 0, "end_offset": 8, "type": "word", - "position": 0 + "position": 1 }, { "token": "/one/two/three", "start_offset": 0, "end_offset": 14, "type": "word", - "position": 0 + "position": 2 } ] } @@ -144,14 +144,14 @@ POST my-index-000001/_analyze "start_offset": 7, "end_offset": 18, "type": "word", - "position": 0 + "position": 1 }, { "token": "/three/four/five", "start_offset": 7, "end_offset": 23, "type": "word", - "position": 0 + "position": 2 } ] } @@ -178,14 +178,14 
@@ If we were to set `reverse` to `true`, it would produce the following: [[analysis-pathhierarchy-tokenizer-detailed-examples]] === Detailed examples -A common use-case for the `path_hierarchy` tokenizer is filtering results by -file paths. If indexing a file path along with the data, the use of the -`path_hierarchy` tokenizer to analyze the path allows filtering the results +A common use-case for the `path_hierarchy` tokenizer is filtering results by +file paths. If indexing a file path along with the data, the use of the +`path_hierarchy` tokenizer to analyze the path allows filtering the results by different parts of the file path string. This example configures an index to have two custom analyzers and applies -those analyzers to multifields of the `file_path` text field that will +those analyzers to multifields of the `file_path` text field that will store filenames. One of the two analyzers uses reverse tokenization. Some sample documents are then indexed to represent some file paths for photos inside photo folders of two different users. @@ -264,8 +264,8 @@ POST file-path-test/_doc/5 -------------------------------------------------- -A search for a particular file path string against the text field matches all -the example documents, with Bob's documents ranking highest due to `bob` also +A search for a particular file path string against the text field matches all +the example documents, with Bob's documents ranking highest due to `bob` also being one of the terms created by the standard analyzer boosting relevance for Bob's documents. @@ -301,7 +301,7 @@ GET file-path-test/_search With the reverse parameter for this tokenizer, it's also possible to match from the other end of the file path, such as individual file names or a deep level subdirectory. The following example shows a search for all files named -`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field +`my_photo1.jpg` within any directory via the `file_path.tree_reversed` field configured to use the reverse parameter in the mapping. @@ -342,7 +342,7 @@ POST file-path-test/_analyze It's also useful to be able to filter with file paths when combined with other -types of searches, such as this example looking for any files paths with `16` +types of searches, such as this example looking for any files paths with `16` that also must be in Alice's photo directory. 
[source,console] diff --git a/docs/reference/search/profile.asciidoc b/docs/reference/search/profile.asciidoc index 3fed14231808..5f1a0ccfdd6b 100644 --- a/docs/reference/search/profile.asciidoc +++ b/docs/reference/search/profile.asciidoc @@ -1298,7 +1298,7 @@ One of the `dfs.knn` sections for a shard looks like the following: "query" : [ { "type" : "DocAndScoreQuery", - "description" : "DocAndScore[100]", + "description" : "DocAndScoreQuery[0,...][0.008961825,...],0.008961825", "time_in_nanos" : 444414, "breakdown" : { "set_min_competitive_score_count" : 0, diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml index 0156f13b4b05..4d9b96184d07 100644 --- a/gradle/verification-metadata.xml +++ b/gradle/verification-metadata.xml @@ -2824,129 +2824,129 @@ - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + - - - + + + diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java index e2aea6b3ebd9..4ed60b2f5e8b 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactory.java @@ -13,7 +13,7 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -39,7 +39,7 @@ public interface VectorScorerFactory { Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ); @@ -52,9 +52,5 @@ public interface VectorScorerFactory { * @param queryVector the query vector * @return an optional containing the vector scorer, or empty */ - Optional getInt7SQVectorScorer( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ); + Optional getInt7SQVectorScorer(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector); } diff --git a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a22d78798025..6248902c32e7 100644 --- a/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -13,7 +13,7 @@ import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.store.IndexInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; @@ -25,7 +25,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType 
similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { throw new UnsupportedOperationException("should not reach here"); @@ -34,7 +34,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { throw new UnsupportedOperationException("should not reach here"); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java index a65fe582087d..a863d9e3448c 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/VectorScorerFactoryImpl.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.elasticsearch.nativeaccess.NativeAccess; import org.elasticsearch.simdvec.internal.Int7SQVectorScorer; import org.elasticsearch.simdvec.internal.Int7SQVectorScorerSupplier.DotProductSupplier; @@ -38,7 +38,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { public Optional getInt7SQVectorScorerSupplier( VectorSimilarityType similarityType, IndexInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant ) { input = FilterIndexInput.unwrapOnlyTest(input); @@ -57,7 +57,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory { @Override public Optional getInt7SQVectorScorer( VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float[] queryVector ) { return Int7SQVectorScorer.create(sim, values, queryVector); diff --git a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index 0b41436ce224..e02df124ad0f 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -11,18 +11,14 @@ package org.elasticsearch.simdvec.internal; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.util.Optional; public final class Int7SQVectorScorer { // Unconditionally returns an empty optional on <= JDK 21, since the scorer is only supported on JDK 22+ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { return Optional.empty(); } diff --git 
a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java index f6d874cd3e72..198e10406056 100644 --- a/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java +++ b/libs/simdvec/src/main21/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorerSupplier.java @@ -12,7 +12,7 @@ package org.elasticsearch.simdvec.internal; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity; import java.io.IOException; @@ -31,12 +31,12 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS final int maxOrd; final float scoreCorrectionConstant; final MemorySegmentAccessInput input; - final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds + final QuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds final ScalarQuantizedVectorSimilarity fallbackScorer; protected Int7SQVectorScorerSupplier( MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, + QuantizedByteVectorValues values, float scoreCorrectionConstant, ScalarQuantizedVectorSimilarity fallbackScorer ) { @@ -104,11 +104,7 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier { - public EuclideanSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS)); } @@ -127,11 +123,7 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS public static final class DotProductSupplier extends Int7SQVectorScorerSupplier { - public DotProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public DotProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant, BITS)); } @@ -151,11 +143,7 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorS public static final class MaxInnerProductSupplier extends Int7SQVectorScorerSupplier { - public MaxInnerProductSupplier( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - float scoreCorrectionConstant - ) { + public MaxInnerProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) { super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant, BITS)); } diff --git a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java index 
c9659ea1af9a..3d0e1e71a374 100644 --- a/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java +++ b/libs/simdvec/src/main22/java/org/elasticsearch/simdvec/internal/Int7SQVectorScorer.java @@ -15,7 +15,7 @@ import org.apache.lucene.store.FilterIndexInput; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.MemorySegmentAccessInput; import org.apache.lucene.util.hnsw.RandomVectorScorer; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -35,11 +35,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr byte[] scratch; /** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. */ - public static Optional create( - VectorSimilarityFunction sim, - RandomAccessQuantizedByteVectorValues values, - float[] queryVector - ) { + public static Optional create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) { checkDimensions(queryVector.length, values.dimension()); var input = values.getSlice(); if (input == null) { @@ -63,12 +59,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr }; } - Int7SQVectorScorer( - MemorySegmentAccessInput input, - RandomAccessQuantizedByteVectorValues values, - byte[] queryVector, - float queryCorrection - ) { + Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) { super(values); this.input = input; assert queryVector.length == values.getVectorByteLength(); @@ -105,7 +96,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr } public static final class DotProductScorer extends Int7SQVectorScorer { - public DotProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public DotProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -122,7 +113,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr } public static final class EuclideanScorer extends Int7SQVectorScorer { - public EuclideanScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) { + public EuclideanScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) { super(in, values, query, correction); } @@ -136,7 +127,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.Abstr } public static final class MaxInnerProductScorer extends Int7SQVectorScorer { - public MaxInnerProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float corr) { + public MaxInnerProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float corr) { super(in, values, query, corr); } diff --git a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java index db57dc936e79..0f967127f6f2 100644 --- a/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java +++ 
b/libs/simdvec/src/test/java/org/elasticsearch/simdvec/VectorScorerFactoryTests.java @@ -21,7 +21,7 @@ import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import java.io.IOException; @@ -431,14 +431,13 @@ public class VectorScorerFactoryTests extends AbstractVectorTestCase { } } - RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { + QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException { var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7); var slice = in.slice("values", 0, in.length()); return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice); } - RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim) - throws IOException { + RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException { return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values); } diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index b16c6eaaaa1d..f4f7e787d2b7 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -33,3 +33,7 @@ dependencies { artifacts { restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + +tasks.named("yamlRestCompatTestTransform").configure { task -> + task.replaceValueInMatch("tokens.0.token", "absenț", "romanian") +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java new file mode 100644 index 000000000000..0eb8d916307a --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyRomanianStemmer.java @@ -0,0 +1,741 @@ +/* + * @notice + * Generated by Snowball 2.0.0 - https://snowballstem.org/ + * + * Modifications copyright (C) 2024 Elasticsearch B.V. + */ + +package org.elasticsearch.analysis.common; + +import org.tartarus.snowball.Among; + +/** +* This class implements the stemming algorithm defined by a snowball script. 
+* NOTE: This is the RomanianStemmer used in Lucene 9 and should only be used for backwards compatibility +*/ +@SuppressWarnings("checkstyle:DescendantToken") +class LegacyRomanianStemmer extends org.tartarus.snowball.SnowballStemmer { + + private static final java.lang.invoke.MethodHandles.Lookup methodObject = java.lang.invoke.MethodHandles.lookup(); + + private static final Among a_0[] = { new Among("", -1, 3), new Among("I", 0, 1), new Among("U", 0, 2) }; + + private static final Among a_1[] = { + new Among("ea", -1, 3), + new Among("a\u0163ia", -1, 7), + new Among("aua", -1, 2), + new Among("iua", -1, 4), + new Among("a\u0163ie", -1, 7), + new Among("ele", -1, 3), + new Among("ile", -1, 5), + new Among("iile", 6, 4), + new Among("iei", -1, 4), + new Among("atei", -1, 6), + new Among("ii", -1, 4), + new Among("ului", -1, 1), + new Among("ul", -1, 1), + new Among("elor", -1, 3), + new Among("ilor", -1, 4), + new Among("iilor", 14, 4) }; + + private static final Among a_2[] = { + new Among("icala", -1, 4), + new Among("iciva", -1, 4), + new Among("ativa", -1, 5), + new Among("itiva", -1, 6), + new Among("icale", -1, 4), + new Among("a\u0163iune", -1, 5), + new Among("i\u0163iune", -1, 6), + new Among("atoare", -1, 5), + new Among("itoare", -1, 6), + new Among("\u0103toare", -1, 5), + new Among("icitate", -1, 4), + new Among("abilitate", -1, 1), + new Among("ibilitate", -1, 2), + new Among("ivitate", -1, 3), + new Among("icive", -1, 4), + new Among("ative", -1, 5), + new Among("itive", -1, 6), + new Among("icali", -1, 4), + new Among("atori", -1, 5), + new Among("icatori", 18, 4), + new Among("itori", -1, 6), + new Among("\u0103tori", -1, 5), + new Among("icitati", -1, 4), + new Among("abilitati", -1, 1), + new Among("ivitati", -1, 3), + new Among("icivi", -1, 4), + new Among("ativi", -1, 5), + new Among("itivi", -1, 6), + new Among("icit\u0103i", -1, 4), + new Among("abilit\u0103i", -1, 1), + new Among("ivit\u0103i", -1, 3), + new Among("icit\u0103\u0163i", -1, 4), + new Among("abilit\u0103\u0163i", -1, 1), + new Among("ivit\u0103\u0163i", -1, 3), + new Among("ical", -1, 4), + new Among("ator", -1, 5), + new Among("icator", 35, 4), + new Among("itor", -1, 6), + new Among("\u0103tor", -1, 5), + new Among("iciv", -1, 4), + new Among("ativ", -1, 5), + new Among("itiv", -1, 6), + new Among("ical\u0103", -1, 4), + new Among("iciv\u0103", -1, 4), + new Among("ativ\u0103", -1, 5), + new Among("itiv\u0103", -1, 6) }; + + private static final Among a_3[] = { + new Among("ica", -1, 1), + new Among("abila", -1, 1), + new Among("ibila", -1, 1), + new Among("oasa", -1, 1), + new Among("ata", -1, 1), + new Among("ita", -1, 1), + new Among("anta", -1, 1), + new Among("ista", -1, 3), + new Among("uta", -1, 1), + new Among("iva", -1, 1), + new Among("ic", -1, 1), + new Among("ice", -1, 1), + new Among("abile", -1, 1), + new Among("ibile", -1, 1), + new Among("isme", -1, 3), + new Among("iune", -1, 2), + new Among("oase", -1, 1), + new Among("ate", -1, 1), + new Among("itate", 17, 1), + new Among("ite", -1, 1), + new Among("ante", -1, 1), + new Among("iste", -1, 3), + new Among("ute", -1, 1), + new Among("ive", -1, 1), + new Among("ici", -1, 1), + new Among("abili", -1, 1), + new Among("ibili", -1, 1), + new Among("iuni", -1, 2), + new Among("atori", -1, 1), + new Among("osi", -1, 1), + new Among("ati", -1, 1), + new Among("itati", 30, 1), + new Among("iti", -1, 1), + new Among("anti", -1, 1), + new Among("isti", -1, 3), + new Among("uti", -1, 1), + new Among("i\u015Fti", -1, 3), + new Among("ivi", -1, 
1), + new Among("it\u0103i", -1, 1), + new Among("o\u015Fi", -1, 1), + new Among("it\u0103\u0163i", -1, 1), + new Among("abil", -1, 1), + new Among("ibil", -1, 1), + new Among("ism", -1, 3), + new Among("ator", -1, 1), + new Among("os", -1, 1), + new Among("at", -1, 1), + new Among("it", -1, 1), + new Among("ant", -1, 1), + new Among("ist", -1, 3), + new Among("ut", -1, 1), + new Among("iv", -1, 1), + new Among("ic\u0103", -1, 1), + new Among("abil\u0103", -1, 1), + new Among("ibil\u0103", -1, 1), + new Among("oas\u0103", -1, 1), + new Among("at\u0103", -1, 1), + new Among("it\u0103", -1, 1), + new Among("ant\u0103", -1, 1), + new Among("ist\u0103", -1, 3), + new Among("ut\u0103", -1, 1), + new Among("iv\u0103", -1, 1) }; + + private static final Among a_4[] = { + new Among("ea", -1, 1), + new Among("ia", -1, 1), + new Among("esc", -1, 1), + new Among("\u0103sc", -1, 1), + new Among("ind", -1, 1), + new Among("\u00E2nd", -1, 1), + new Among("are", -1, 1), + new Among("ere", -1, 1), + new Among("ire", -1, 1), + new Among("\u00E2re", -1, 1), + new Among("se", -1, 2), + new Among("ase", 10, 1), + new Among("sese", 10, 2), + new Among("ise", 10, 1), + new Among("use", 10, 1), + new Among("\u00E2se", 10, 1), + new Among("e\u015Fte", -1, 1), + new Among("\u0103\u015Fte", -1, 1), + new Among("eze", -1, 1), + new Among("ai", -1, 1), + new Among("eai", 19, 1), + new Among("iai", 19, 1), + new Among("sei", -1, 2), + new Among("e\u015Fti", -1, 1), + new Among("\u0103\u015Fti", -1, 1), + new Among("ui", -1, 1), + new Among("ezi", -1, 1), + new Among("\u00E2i", -1, 1), + new Among("a\u015Fi", -1, 1), + new Among("se\u015Fi", -1, 2), + new Among("ase\u015Fi", 29, 1), + new Among("sese\u015Fi", 29, 2), + new Among("ise\u015Fi", 29, 1), + new Among("use\u015Fi", 29, 1), + new Among("\u00E2se\u015Fi", 29, 1), + new Among("i\u015Fi", -1, 1), + new Among("u\u015Fi", -1, 1), + new Among("\u00E2\u015Fi", -1, 1), + new Among("a\u0163i", -1, 2), + new Among("ea\u0163i", 38, 1), + new Among("ia\u0163i", 38, 1), + new Among("e\u0163i", -1, 2), + new Among("i\u0163i", -1, 2), + new Among("\u00E2\u0163i", -1, 2), + new Among("ar\u0103\u0163i", -1, 1), + new Among("ser\u0103\u0163i", -1, 2), + new Among("aser\u0103\u0163i", 45, 1), + new Among("seser\u0103\u0163i", 45, 2), + new Among("iser\u0103\u0163i", 45, 1), + new Among("user\u0103\u0163i", 45, 1), + new Among("\u00E2ser\u0103\u0163i", 45, 1), + new Among("ir\u0103\u0163i", -1, 1), + new Among("ur\u0103\u0163i", -1, 1), + new Among("\u00E2r\u0103\u0163i", -1, 1), + new Among("am", -1, 1), + new Among("eam", 54, 1), + new Among("iam", 54, 1), + new Among("em", -1, 2), + new Among("asem", 57, 1), + new Among("sesem", 57, 2), + new Among("isem", 57, 1), + new Among("usem", 57, 1), + new Among("\u00E2sem", 57, 1), + new Among("im", -1, 2), + new Among("\u00E2m", -1, 2), + new Among("\u0103m", -1, 2), + new Among("ar\u0103m", 65, 1), + new Among("ser\u0103m", 65, 2), + new Among("aser\u0103m", 67, 1), + new Among("seser\u0103m", 67, 2), + new Among("iser\u0103m", 67, 1), + new Among("user\u0103m", 67, 1), + new Among("\u00E2ser\u0103m", 67, 1), + new Among("ir\u0103m", 65, 1), + new Among("ur\u0103m", 65, 1), + new Among("\u00E2r\u0103m", 65, 1), + new Among("au", -1, 1), + new Among("eau", 76, 1), + new Among("iau", 76, 1), + new Among("indu", -1, 1), + new Among("\u00E2ndu", -1, 1), + new Among("ez", -1, 1), + new Among("easc\u0103", -1, 1), + new Among("ar\u0103", -1, 1), + new Among("ser\u0103", -1, 2), + new Among("aser\u0103", 84, 1), + new Among("seser\u0103", 
84, 2), + new Among("iser\u0103", 84, 1), + new Among("user\u0103", 84, 1), + new Among("\u00E2ser\u0103", 84, 1), + new Among("ir\u0103", -1, 1), + new Among("ur\u0103", -1, 1), + new Among("\u00E2r\u0103", -1, 1), + new Among("eaz\u0103", -1, 1) }; + + private static final Among a_5[] = { + new Among("a", -1, 1), + new Among("e", -1, 1), + new Among("ie", 1, 1), + new Among("i", -1, 1), + new Among("\u0103", -1, 1) }; + + private static final char g_v[] = { 17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 32, 0, 0, 4 }; + + private boolean B_standard_suffix_removed; + private int I_p2; + private int I_p1; + private int I_pV; + + private boolean r_prelude() { + while (true) { + int v_1 = cursor; + lab0: { + golab1: while (true) { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + bra = cursor; + lab3: { + int v_3 = cursor; + lab4: { + if (!(eq_s("u"))) { + break lab4; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab4; + } + slice_from("U"); + break lab3; + } + cursor = v_3; + if (!(eq_s("i"))) { + break lab2; + } + ket = cursor; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + slice_from("I"); + } + cursor = v_2; + break golab1; + } + cursor = v_2; + if (cursor >= limit) { + break lab0; + } + cursor++; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_mark_regions() { + I_pV = limit; + I_p1 = limit; + I_p2 = limit; + int v_1 = cursor; + lab0: { + lab1: { + int v_2 = cursor; + lab2: { + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + lab3: { + int v_3 = cursor; + lab4: { + if (!(out_grouping(g_v, 97, 259))) { + break lab4; + } + golab5: while (true) { + lab6: { + if (!(in_grouping(g_v, 97, 259))) { + break lab6; + } + break golab5; + } + if (cursor >= limit) { + break lab4; + } + cursor++; + } + break lab3; + } + cursor = v_3; + if (!(in_grouping(g_v, 97, 259))) { + break lab2; + } + golab7: while (true) { + lab8: { + if (!(out_grouping(g_v, 97, 259))) { + break lab8; + } + break golab7; + } + if (cursor >= limit) { + break lab2; + } + cursor++; + } + } + break lab1; + } + cursor = v_2; + if (!(out_grouping(g_v, 97, 259))) { + break lab0; + } + lab9: { + int v_6 = cursor; + lab10: { + if (!(out_grouping(g_v, 97, 259))) { + break lab10; + } + golab11: while (true) { + lab12: { + if (!(in_grouping(g_v, 97, 259))) { + break lab12; + } + break golab11; + } + if (cursor >= limit) { + break lab10; + } + cursor++; + } + break lab9; + } + cursor = v_6; + if (!(in_grouping(g_v, 97, 259))) { + break lab0; + } + if (cursor >= limit) { + break lab0; + } + cursor++; + } + } + I_pV = cursor; + } + cursor = v_1; + int v_8 = cursor; + lab13: { + golab14: while (true) { + lab15: { + if (!(in_grouping(g_v, 97, 259))) { + break lab15; + } + break golab14; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + golab16: while (true) { + lab17: { + if (!(out_grouping(g_v, 97, 259))) { + break lab17; + } + break golab16; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p1 = cursor; + golab18: while (true) { + lab19: { + if (!(in_grouping(g_v, 97, 259))) { + break lab19; + } + break golab18; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + golab20: while (true) { + lab21: { + if (!(out_grouping(g_v, 97, 259))) { + break lab21; + } + break golab20; + } + if (cursor >= limit) { + break lab13; + } + cursor++; + } + I_p2 = cursor; + } + cursor = v_8; + return true; + } + + private boolean r_postlude() { + int among_var; + while (true) { + int v_1 = 
cursor; + lab0: { + bra = cursor; + among_var = find_among(a_0); + if (among_var == 0) { + break lab0; + } + ket = cursor; + switch (among_var) { + case 1: + slice_from("i"); + break; + case 2: + slice_from("u"); + break; + case 3: + if (cursor >= limit) { + break lab0; + } + cursor++; + break; + } + continue; + } + cursor = v_1; + break; + } + return true; + } + + private boolean r_RV() { + if (!(I_pV <= cursor)) { + return false; + } + return true; + } + + private boolean r_R1() { + if (!(I_p1 <= cursor)) { + return false; + } + return true; + } + + private boolean r_R2() { + if (!(I_p2 <= cursor)) { + return false; + } + return true; + } + + private boolean r_step_0() { + int among_var; + ket = cursor; + among_var = find_among_b(a_1); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + slice_from("a"); + break; + case 3: + slice_from("e"); + break; + case 4: + slice_from("i"); + break; + case 5: { + int v_1 = limit - cursor; + lab0: { + if (!(eq_s_b("ab"))) { + break lab0; + } + return false; + } + cursor = limit - v_1; + } + slice_from("i"); + break; + case 6: + slice_from("at"); + break; + case 7: + slice_from("a\u0163i"); + break; + } + return true; + } + + private boolean r_combo_suffix() { + int among_var; + int v_1 = limit - cursor; + ket = cursor; + among_var = find_among_b(a_2); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R1()) { + return false; + } + switch (among_var) { + case 1: + slice_from("abil"); + break; + case 2: + slice_from("ibil"); + break; + case 3: + slice_from("iv"); + break; + case 4: + slice_from("ic"); + break; + case 5: + slice_from("at"); + break; + case 6: + slice_from("it"); + break; + } + B_standard_suffix_removed = true; + cursor = limit - v_1; + return true; + } + + private boolean r_standard_suffix() { + int among_var; + B_standard_suffix_removed = false; + while (true) { + int v_1 = limit - cursor; + lab0: { + if (!r_combo_suffix()) { + break lab0; + } + continue; + } + cursor = limit - v_1; + break; + } + ket = cursor; + among_var = find_among_b(a_3); + if (among_var == 0) { + return false; + } + bra = cursor; + if (!r_R2()) { + return false; + } + switch (among_var) { + case 1: + slice_del(); + break; + case 2: + if (!(eq_s_b("\u0163"))) { + return false; + } + bra = cursor; + slice_from("t"); + break; + case 3: + slice_from("ist"); + break; + } + B_standard_suffix_removed = true; + return true; + } + + private boolean r_verb_suffix() { + int among_var; + if (cursor < I_pV) { + return false; + } + int v_2 = limit_backward; + limit_backward = I_pV; + ket = cursor; + among_var = find_among_b(a_4); + if (among_var == 0) { + limit_backward = v_2; + return false; + } + bra = cursor; + switch (among_var) { + case 1: + lab0: { + int v_3 = limit - cursor; + lab1: { + if (!(out_grouping_b(g_v, 97, 259))) { + break lab1; + } + break lab0; + } + cursor = limit - v_3; + if (!(eq_s_b("u"))) { + limit_backward = v_2; + return false; + } + } + slice_del(); + break; + case 2: + slice_del(); + break; + } + limit_backward = v_2; + return true; + } + + private boolean r_vowel_suffix() { + ket = cursor; + if (find_among_b(a_5) == 0) { + return false; + } + bra = cursor; + if (!r_RV()) { + return false; + } + slice_del(); + return true; + } + + @Override + public boolean stem() { + int v_1 = cursor; + r_prelude(); + cursor = v_1; + r_mark_regions(); + limit_backward = cursor; + cursor = limit; + int v_3 = limit - cursor; + r_step_0(); 
+ cursor = limit - v_3; + int v_4 = limit - cursor; + r_standard_suffix(); + cursor = limit - v_4; + int v_5 = limit - cursor; + lab0: { + lab1: { + int v_6 = limit - cursor; + lab2: { + if (!(B_standard_suffix_removed)) { + break lab2; + } + break lab1; + } + cursor = limit - v_6; + if (!r_verb_suffix()) { + break lab0; + } + } + } + cursor = limit - v_5; + int v_7 = limit - cursor; + r_vowel_suffix(); + cursor = limit - v_7; + cursor = limit_backward; + int v_8 = cursor; + r_postlude(); + cursor = v_8; + return true; + } + + @Override + public boolean equals(Object o) { + return o instanceof LegacyRomanianStemmer; + } + + @Override + public int hashCode() { + return LegacyRomanianStemmer.class.getName().hashCode(); + } +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java index 9ea3a9fa4eee..917a45188123 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PersianAnalyzerProvider.java @@ -9,24 +9,72 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.LowerCaseFilter; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.ar.ArabicNormalizationFilter; +import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.fa.PersianAnalyzer; +import org.apache.lucene.analysis.fa.PersianCharFilter; +import org.apache.lucene.analysis.fa.PersianNormalizationFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +import java.io.Reader; - private final PersianAnalyzer analyzer; +public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider { + + private final StopwordAnalyzerBase analyzer; PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10 this analyzer contains stemming by default + analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())); + } else { + // for older index versions we need the old analyzer behaviour without stemming + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ 
+ result = new PersianNormalizationFilter(result); + /* + * the order here is important: the stopword list is normalized with the + * above! + */ + return new TokenStreamComponents(source, new StopFilter(result, stopwords)); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + TokenStream result = new LowerCaseFilter(in); + result = new DecimalDigitFilter(result); + result = new ArabicNormalizationFilter(result); + /* additional persian-specific normalization */ + result = new PersianNormalizationFilter(result); + return result; + } + + protected Reader initReader(String fieldName, Reader reader) { + return new PersianCharFilter(reader); + } + }; + } } @Override - public PersianAnalyzer get() { + public StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java index cf33a38abd63..6c28df83a6d3 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/RomanianAnalyzerProvider.java @@ -9,28 +9,60 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopwordAnalyzerBase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.LowerCaseFilter; +import org.apache.lucene.analysis.core.StopFilter; +import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter; import org.apache.lucene.analysis.ro.RomanianAnalyzer; +import org.apache.lucene.analysis.snowball.SnowballFilter; +import org.apache.lucene.analysis.standard.StandardTokenizer; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider; import org.elasticsearch.index.analysis.Analysis; -public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { +public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider { - private final RomanianAnalyzer analyzer; + private final StopwordAnalyzerBase analyzer; RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(name, settings); - analyzer = new RomanianAnalyzer( - Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()), - Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET) - ); + CharArraySet stopwords = Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()); + CharArraySet stemExclusionSet = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET); + if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) { + // since Lucene 10, this analyzer uses a modern unicode form and normalizes cedilla forms to forms with commas + analyzer = new RomanianAnalyzer(stopwords, stemExclusionSet); + } else { + // for older index versions we need the old behaviour without normalization + analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet())) { + + protected Analyzer.TokenStreamComponents createComponents(String
fieldName) { + final Tokenizer source = new StandardTokenizer(); + TokenStream result = new LowerCaseFilter(source); + result = new StopFilter(result, stopwords); + if (stemExclusionSet.isEmpty() == false) { + result = new SetKeywordMarkerFilter(result, stemExclusionSet); + } + result = new SnowballFilter(result, new LegacyRomanianStemmer()); + return new TokenStreamComponents(source, result); + } + + protected TokenStream normalize(String fieldName, TokenStream in) { + return new LowerCaseFilter(in); + } + }; + + } } @Override - public RomanianAnalyzer get() { + public StopwordAnalyzerBase get() { return this.analyzer; } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java index 1c71c6431151..7548c8ad2b88 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactory.java @@ -9,6 +9,7 @@ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.TokenFilter; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ar.ArabicStemFilter; import org.apache.lucene.analysis.bg.BulgarianStemFilter; @@ -38,8 +39,9 @@ import org.apache.lucene.analysis.it.ItalianLightStemFilter; import org.apache.lucene.analysis.lv.LatvianStemFilter; import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream; import org.apache.lucene.analysis.no.NorwegianLightStemFilter; -import org.apache.lucene.analysis.no.NorwegianLightStemmer; +import org.apache.lucene.analysis.no.NorwegianLightStemFilterFactory; import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter; +import org.apache.lucene.analysis.no.NorwegianMinimalStemFilterFactory; import org.apache.lucene.analysis.pt.PortugueseLightStemFilter; import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter; import org.apache.lucene.analysis.pt.PortugueseStemFilter; @@ -62,14 +64,11 @@ import org.tartarus.snowball.ext.EnglishStemmer; import org.tartarus.snowball.ext.EstonianStemmer; import org.tartarus.snowball.ext.FinnishStemmer; import org.tartarus.snowball.ext.FrenchStemmer; -import org.tartarus.snowball.ext.German2Stemmer; import org.tartarus.snowball.ext.GermanStemmer; import org.tartarus.snowball.ext.HungarianStemmer; import org.tartarus.snowball.ext.IrishStemmer; import org.tartarus.snowball.ext.ItalianStemmer; -import org.tartarus.snowball.ext.KpStemmer; import org.tartarus.snowball.ext.LithuanianStemmer; -import org.tartarus.snowball.ext.LovinsStemmer; import org.tartarus.snowball.ext.NorwegianStemmer; import org.tartarus.snowball.ext.PortugueseStemmer; import org.tartarus.snowball.ext.RomanianStemmer; @@ -80,6 +79,7 @@ import org.tartarus.snowball.ext.SwedishStemmer; import org.tartarus.snowball.ext.TurkishStemmer; import java.io.IOException; +import java.util.Collections; public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { @@ -87,27 +87,15 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { private static final TokenStream EMPTY_TOKEN_STREAM = new EmptyTokenStream(); - private String language; + private final String language; + + private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(StemmerTokenFilterFactory.class); StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, 
String name, Settings settings) throws IOException { super(name, settings); this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter"))); // check that we have a valid language by trying to create a TokenStream create(EMPTY_TOKEN_STREAM).close(); - if ("lovins".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "lovins_deprecation", - "The [lovins] stemmer is deprecated and will be removed in a future version." - ); - } - if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - deprecationLogger.critical( - DeprecationCategory.ANALYSIS, - "dutch_kp_deprecation", - "The [dutch_kp] stemmer is deprecated and will be removed in a future version." - ); - } } @Override @@ -135,8 +123,17 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { } else if ("dutch".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new DutchStemmer()); } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new KpStemmer()); - + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "dutch_kp_deprecation", + "The [dutch_kp] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; // English stemmers } else if ("english".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); @@ -145,7 +142,17 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { || "kstem".equalsIgnoreCase(language)) { return new KStemFilter(tokenStream); } else if ("lovins".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new LovinsStemmer()); + deprecationLogger.critical( + DeprecationCategory.ANALYSIS, + "lovins_deprecation", + "The [lovins] stemmer is deprecated and will be removed in a future version." + ); + return new TokenFilter(tokenStream) { + @Override + public boolean incrementToken() { + return false; + } + }; } else if ("porter".equalsIgnoreCase(language)) { return new PorterStemFilter(tokenStream); } else if ("porter2".equalsIgnoreCase(language)) { @@ -185,7 +192,13 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { } else if ("german".equalsIgnoreCase(language)) { return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("german2".equalsIgnoreCase(language)) { - return new SnowballFilter(tokenStream, new German2Stemmer()); + DEPRECATION_LOGGER.critical( + DeprecationCategory.ANALYSIS, + "german2_stemmer_deprecation", + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. " + + "Replace all usages of 'german2' with 'german'." 
+ ); + return new SnowballFilter(tokenStream, new GermanStemmer()); } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) { return new GermanLightStemFilter(tokenStream); } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) { @@ -231,10 +244,13 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory { // Norwegian (Nynorsk) stemmers } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) { - return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); + NorwegianLightStemFilterFactory factory = new NorwegianLightStemFilterFactory(Collections.singletonMap("variant", "nn")); + return factory.create(tokenStream); } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) { - return new NorwegianMinimalStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK); - + NorwegianMinimalStemFilterFactory factory = new NorwegianMinimalStemFilterFactory( + Collections.singletonMap("variant", "nn") + ); + return factory.create(tokenStream); // Persian stemmers } else if ("persian".equalsIgnoreCase(language)) { return new PersianStemFilter(tokenStream); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java index b406fa833577..0d936666e92c 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java @@ -278,7 +278,7 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase { boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro")) ).highlighter(highlight().field("field1").order("score").preTags("").postTags("")), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); for (int i = 0; i < 2; i++) { assertHighlight( resp, diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java new file mode 100644 index 000000000000..7b962538c2a1 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/PersianAnalyzerProviderTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Tests Persian Analyzer factory and behavioural changes with Lucene 10 + */ +public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase { + + public void testPersianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زياد", "خوانده" }); + } + + public void testPersianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = persianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زيادي", "خوانده" }); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java new file mode 100644 index 000000000000..1af44bc71f35 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/RomanianAnalyzerTests.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.Analyzer; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.index.IndexVersionUtils; + +import java.io.IOException; + +import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo; + +/** + * Verifies the behavior of Romanian analyzer. + */ +public class RomanianAnalyzerTests extends ESTokenStreamTestCase { + + public void testRomanianAnalyzerPostLucene10() throws IOException { + IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersions.UPGRADE_TO_LUCENE_10_0_0, + IndexVersion.current() + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenț" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoștinț" }); + } + + public void testRomanianAnalyzerPreLucene10() throws IOException { + IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween( + random(), + IndexVersionUtils.getFirstVersion(), + IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0) + ); + Settings settings = ESTestCase.indexSettings(1, 1) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version) + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + Environment environment = new Environment(settings, null); + + RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider( + idxSettings, + environment, + "my-analyzer", + Settings.EMPTY + ); + Analyzer analyzer = romanianAnalyzerProvider.get(); + assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenţ" }); + assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoştinţ" }); + } +} diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java index 8f3d52f0174c..bb06c221873b 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java +++ 
b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/StemmerTokenFilterFactoryTests.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.analysis.common; +import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.core.WhitespaceTokenizer; @@ -16,6 +17,7 @@ import org.apache.lucene.analysis.snowball.SnowballFilter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -103,6 +105,42 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase { assertEquals("Invalid stemmer class specified: [english, light_english]", e.getMessage()); } + public void testGermanAndGerman2Stemmer() throws IOException { + IndexVersion v = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.UPGRADE_TO_LUCENE_10_0_0, IndexVersion.current()); + Analyzer analyzer = createGermanStemmer("german", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + + analyzer = createGermanStemmer("german2", v); + assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" }); + assertWarnings( + "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. " + + "Replace all usages of 'german2' with 'german'." + ); + } + + private static Analyzer createGermanStemmer(String variant, IndexVersion v) throws IOException { + + Settings settings = Settings.builder() + .put("index.analysis.filter.my_german.type", "stemmer") + .put("index.analysis.filter.my_german.language", variant) + .put("index.analysis.analyzer.my_german.tokenizer", "whitespace") + .put("index.analysis.analyzer.my_german.filter", "my_german") + .put(SETTING_VERSION_CREATED, v) + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_german"); + assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class)); + Tokenizer tokenizer = new WhitespaceTokenizer(); + tokenizer.setReader(new StringReader("Buecher oder Bücher")); + TokenStream create = tokenFilter.create(tokenizer); + assertThat(create, instanceOf(SnowballFilter.class)); + IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers; + NamedAnalyzer analyzer = indexAnalyzers.get("my_german"); + return analyzer; + } + public void testKpDeprecation() throws IOException { IndexVersion v = IndexVersionUtils.randomVersion(random()); Settings settings = Settings.builder() diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml index c03bdb311105..8930e485aa24 100644 --- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml +++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/analysis-common/20_analyzers.yml @@ -901,6 +901,31 @@ - length: { tokens: 1 } - match: { tokens.0.token: خورد } +--- +"persian stemming": + - requires: + cluster_features: 
["lucene_10_upgrade"] + reason: "test requires persian analyzer stemming capabilities that come with Lucene 10" + + - do: + indices.create: + index: test + body: + settings: + analysis: + analyzer: + my_analyzer: + type: persian + + - do: + indices.analyze: + index: test + body: + text: كتابها + analyzer: my_analyzer + - length: { tokens: 1 } + - match: { tokens.0.token: كتاب } + --- "portuguese": - do: @@ -948,7 +973,7 @@ text: absenţa analyzer: romanian - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } - do: indices.analyze: @@ -957,7 +982,7 @@ text: absenţa analyzer: my_analyzer - length: { tokens: 1 } - - match: { tokens.0.token: absenţ } + - match: { tokens.0.token: absenț } --- "russian": diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 8f1c0cf515e1..cb74d6213781 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -24,7 +24,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.Build; @@ -440,13 +439,13 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic ? includeAutomaton : Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CharacterRunAutomaton(MinimizationOperations.minimize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); + return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton patternsToAutomaton(List patterns) { final List automata = patterns.stream().map(s -> { final String regex = s.replace(".", "\\.").replace("*", ".*"); - return new RegExp(regex).toAutomaton(); + return new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); }).toList(); if (automata.isEmpty()) { return null; diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 8e7ecfa49f14..777ddc28fefd 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1706,7 +1706,7 @@ public class DataStreamIT extends ESIntegTestCase { assertResponse( prepareSearch("metrics-foo").addFetchField(new FieldAndFormat(DEFAULT_TIMESTAMP_FIELD, "epoch_millis")).setSize(totalDocs), resp -> { - assertEquals(totalDocs, resp.getHits().getTotalHits().value); + assertEquals(totalDocs, resp.getHits().getTotalHits().value()); SearchHit[] hits = resp.getHits().getHits(); assertEquals(totalDocs, hits.length); @@ -2027,7 +2027,7 @@ public class DataStreamIT extends ESIntegTestCase { static void verifyDocs(String dataStream, long expectedNumHits, List expectedIndices) { assertResponse(prepareSearch(dataStream).setSize((int) 
expectedNumHits), resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); Arrays.stream(resp.getHits().getHits()).forEach(hit -> assertTrue(expectedIndices.contains(hit.getIndex()))); }); } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 686e253d1d17..a2557a4de6e6 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -545,7 +545,7 @@ public class TSDBIndexingIT extends ESSingleNodeTestCase { var searchRequest = new SearchRequest(dataStreamName); searchRequest.source().trackTotalHits(true); assertResponse(client().search(searchRequest), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numBulkRequests * numDocsPerBulk)); String id = searchResponse.getHits().getHits()[0].getId(); assertThat(id, notNullValue()); diff --git a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java index 6942cc3733d1..f8c8d2bd359f 100644 --- a/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java +++ b/modules/ingest-geoip/src/internalClusterTest/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderIT.java @@ -256,8 +256,8 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT { res -> { try { TotalHits totalHits = res.getHits().getTotalHits(); - assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation); - assertEquals(size, totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation()); + assertEquals(size, totalHits.value()); assertEquals(size, res.getHits().getHits().length); List data = new ArrayList<>(); diff --git a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java index 570c2a5f3783..df6780aba722 100644 --- a/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java +++ b/modules/lang-expression/src/internalClusterTest/java/org/elasticsearch/script/expression/MoreExpressionIT.java @@ -81,7 +81,7 @@ public class MoreExpressionIT extends ESIntegTestCase { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'] + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -91,7 +91,7 @@ public class MoreExpressionIT extends ESIntegTestCase { ensureGreen("test"); prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, 
rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -102,7 +102,7 @@ public class MoreExpressionIT extends ESIntegTestCase { prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get(); assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } @@ -125,7 +125,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertResponse(req, rsp -> { assertNoFailures(rsp); SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals("1", hits.getAt(0).getId()); assertEquals("3", hits.getAt(1).getId()); assertEquals("2", hits.getAt(2).getId()); @@ -148,25 +148,25 @@ public class MoreExpressionIT extends ESIntegTestCase { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -182,25 +182,25 @@ public class MoreExpressionIT extends ESIntegTestCase { prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z") ); assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); 
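
The recurring test edits in these hunks all track one Lucene 10 change: the public TotalHits.value and TotalHits.relation fields are now read through value() and relation() accessor methods. A minimal sketch of the new access pattern, outside this patch (the class and method names below are illustrative only):

import org.apache.lucene.search.TotalHits;

class TotalHitsAccessorSketch {
    // Lucene 10 exposes TotalHits through accessor methods, so callers switch
    // from the old public fields to value()/relation().
    static long hitCount(TotalHits totalHits) {
        assert totalHits.relation() == TotalHits.Relation.EQUAL_TO; // was: totalHits.relation
        return totalHits.value();                                   // was: totalHits.value
    }
}

This is why the same .value -> .value() substitution repeats across the integration tests in this patch.
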
assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); assertResponse(buildRequest("doc['date1'].date.year"), rsp -> { - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); SearchHits hits = rsp.getHits(); assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D); @@ -238,7 +238,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -246,7 +246,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -254,7 +254,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -262,7 +262,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -270,7 +270,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -278,7 +278,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, 
hits.getAt(2).field("foo").getValue(), 0.0D); @@ -286,7 +286,7 @@ public class MoreExpressionIT extends ESIntegTestCase { assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -295,7 +295,7 @@ public class MoreExpressionIT extends ESIntegTestCase { // make sure count() works for missing assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -304,7 +304,7 @@ public class MoreExpressionIT extends ESIntegTestCase { // make sure .empty works in the same way assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 5.0 : 2.0"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -342,7 +342,7 @@ public class MoreExpressionIT extends ESIntegTestCase { ); assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(2, rsp.getHits().getTotalHits().value); + assertEquals(2, rsp.getHits().getTotalHits().value()); assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D); }); @@ -378,7 +378,7 @@ public class MoreExpressionIT extends ESIntegTestCase { String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 
1 : 0)"; assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> { SearchHits hits = rsp.getHits(); - assertEquals(3, hits.getTotalHits().value); + assertEquals(3, hits.getTotalHits().value()); assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D); assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D); assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D); @@ -501,7 +501,7 @@ public class MoreExpressionIT extends ESIntegTestCase { ); assertResponse(req, rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); Stats stats = rsp.getAggregations().get("int_agg"); assertEquals(39.0, stats.getMax(), 0.0001); @@ -655,22 +655,22 @@ public class MoreExpressionIT extends ESIntegTestCase { refresh(); // access .lat assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .lon assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); }); // call haversin assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D); }); } @@ -693,14 +693,14 @@ public class MoreExpressionIT extends ESIntegTestCase { ); // access .value assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); }); // access .empty assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 1 : 0"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -708,7 +708,7 @@ public class MoreExpressionIT extends ESIntegTestCase { // ternary operator // vip's have a 50% discount assertNoFailuresAndResponse(buildRequest("doc['vip'] ? 
doc['price']/2 : doc['price']"), rsp -> { - assertEquals(3, rsp.getHits().getTotalHits().value); + assertEquals(3, rsp.getHits().getTotalHits().value()); assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D); assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D); @@ -727,7 +727,7 @@ public class MoreExpressionIT extends ESIntegTestCase { Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap()); builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script))); assertNoFailuresAndResponse(builder, rsp -> { - assertEquals(1, rsp.getHits().getTotalHits().value); + assertEquals(1, rsp.getHits().getTotalHits().value()); assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D); }); } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java index 0952ff8fe856..bb714d4674ed 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionDoubleValuesScript.java @@ -17,6 +17,8 @@ import org.apache.lucene.search.Rescorer; import org.apache.lucene.search.SortField; import org.elasticsearch.script.DoubleValuesScript; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.function.Function; /** @@ -37,12 +39,20 @@ public class ExpressionDoubleValuesScript implements DoubleValuesScript.Factory return new DoubleValuesScript() { @Override public double execute() { - return exprScript.evaluate(new DoubleValues[0]); + try { + return exprScript.evaluate(new DoubleValues[0]); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override public double evaluate(DoubleValues[] functionValues) { - return exprScript.evaluate(functionValues); + try { + return exprScript.evaluate(functionValues); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } @Override diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java index b306f104d7ba..58cd9ea293ae 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java @@ -24,7 +24,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.script.AggregationScript; import org.elasticsearch.script.BucketAggregationScript; import org.elasticsearch.script.BucketAggregationSelectorScript; -import org.elasticsearch.script.ClassPermission; import org.elasticsearch.script.DoubleValuesScript; import org.elasticsearch.script.FieldScript; import org.elasticsearch.script.FilterScript; @@ -36,9 +35,8 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.script.TermsSetQueryScript; import org.elasticsearch.search.lookup.SearchLookup; -import java.security.AccessControlContext; -import java.security.AccessController; -import java.security.PrivilegedAction; +import java.io.IOException; +import java.io.UncheckedIOException; import java.text.ParseException; 
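
The ExpressionScriptEngine import removals above, together with the compile() hunk that follows, drop the SecurityManager/AccessController plumbing and handle the IOException that Expression#evaluate now declares, wrapping it in UncheckedIOException. A minimal sketch under those assumptions (the class and method names are illustrative, not part of the patch):

import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.search.DoubleValues;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.text.ParseException;

class ExpressionEvalSketch {
    static double evaluateConstant(String source) throws ParseException {
        // Compile directly, without the custom ClassLoader that the removed
        // AccessController block used to set up.
        Expression expr = JavascriptCompiler.compile(source, JavascriptCompiler.DEFAULT_FUNCTIONS);
        try {
            // evaluate(...) declares IOException in Lucene 10; wrap it the same
            // way the patched engine does.
            return expr.evaluate(new DoubleValues[0]);
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
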
import java.util.ArrayList; import java.util.HashMap; @@ -156,36 +154,14 @@ public class ExpressionScriptEngine implements ScriptEngine { @Override public T compile(String scriptName, String scriptSource, ScriptContext context, Map params) { - // classloader created here - final SecurityManager sm = System.getSecurityManager(); SpecialPermission.check(); - Expression expr = AccessController.doPrivileged(new PrivilegedAction() { - @Override - public Expression run() { - try { - // snapshot our context here, we check on behalf of the expression - AccessControlContext engineContext = AccessController.getContext(); - ClassLoader loader = getClass().getClassLoader(); - if (sm != null) { - loader = new ClassLoader(loader) { - @Override - protected Class loadClass(String name, boolean resolve) throws ClassNotFoundException { - try { - engineContext.checkPermission(new ClassPermission(name)); - } catch (SecurityException e) { - throw new ClassNotFoundException(name, e); - } - return super.loadClass(name, resolve); - } - }; - } - // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here - return JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS, loader); - } catch (ParseException e) { - throw convertToScriptException("compile error", scriptSource, scriptSource, e); - } - } - }); + Expression expr; + try { + // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here + expr = JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS); + } catch (ParseException e) { + throw convertToScriptException("compile error", scriptSource, scriptSource, e); + } if (contexts.containsKey(context) == false) { throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]"); } @@ -233,7 +209,11 @@ public class ExpressionScriptEngine implements ScriptEngine { placeholder.setValue(((Number) value).doubleValue()); } }); - return expr.evaluate(functionValuesArray); + try { + return expr.evaluate(functionValuesArray); + } catch (IOException e) { + throw new UncheckedIOException(e); + } } }; }; diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 3efcfde684eb..a3c0c60d7543 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -138,7 +138,7 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase state = new HashMap<>(); Scorable scorer = new Scorable() { - @Override - public int docID() { - return 0; - } - @Override public float score() { return 0.5f; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java index 01a9e995450a..7edd6d530325 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/SimilarityScriptTests.java @@ -85,7 +85,7 @@ public class SimilarityScriptTests extends ScriptTestCase { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, 
topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } @@ -134,7 +134,7 @@ public class SimilarityScriptTests extends ScriptTestCase { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0); } } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java index 19173c650c24..1c6ffe75e3fd 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/RankFeaturesMapperIntegrationIT.java @@ -43,7 +43,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(20f)); } @@ -52,7 +52,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); for (SearchHit hit : searchResponse.getHits().getHits()) { assertThat(hit.getScore(), equalTo(2000f)); } @@ -67,7 +67,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase { .minimumShouldMatch(1) ), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); for (SearchHit hit : searchResponse.getHits().getHits()) { if (hit.getId().equals("all")) { assertThat(hit.getScore(), equalTo(50f)); @@ -83,7 +83,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase { ); assertNoFailuresAndResponse( prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java index 4fc4fc69e0ee..97c97a643e9c 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/TokenCountFieldMapperIntegrationIT.java @@ -203,7 +203,7 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase { } private void assertSearchReturns(SearchResponse result, String... 
ids) { - assertThat(result.getHits().getTotalHits().value, equalTo((long) ids.length)); + assertThat(result.getHits().getTotalHits().value(), equalTo((long) ids.length)); assertThat(result.getHits().getHits().length, equalTo(ids.length)); List foundIds = new ArrayList<>(); for (SearchHit hit : result.getHits()) { diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java index bce6ffb5e0ea..f277d28eed92 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SearchAsYouTypeFieldMapper.java @@ -468,8 +468,8 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper { } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java index d16034c5de2f..a992f68d93d9 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQuery.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.search.TwoPhaseIterator; @@ -266,7 +267,7 @@ public final class SourceConfirmedTextQuery extends Query { @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0); if (scorer == null) { return Explanation.noMatch("No matching phrase"); } @@ -286,15 +287,26 @@ public final class SourceConfirmedTextQuery extends Query { } @Override - public RuntimePhraseScorer scorer(LeafReaderContext context) throws IOException { - final Scorer approximationScorer = approximationWeight != null ? approximationWeight.scorer(context) : null; - if (approximationScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier approximationSupplier = approximationWeight != null ? 
approximationWeight.scorerSupplier(context) : null; + if (approximationSupplier == null) { return null; } - final DocIdSetIterator approximation = approximationScorer.iterator(); - final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); - final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); - return new RuntimePhraseScorer(this, approximation, leafSimScorer, valueFetcher, field, in); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximationScorer = approximationSupplier.get(leadCost); + final DocIdSetIterator approximation = approximationScorer.iterator(); + final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores()); + final CheckedIntFunction, IOException> valueFetcher = valueFetcherProvider.apply(context); + return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in); + } + + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -310,7 +322,7 @@ public final class SourceConfirmedTextQuery extends Query { Weight innerWeight = in.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1); return innerWeight.matches(context, doc); } - RuntimePhraseScorer scorer = scorer(context); + RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0L); if (scorer == null) { return null; } @@ -336,14 +348,12 @@ public final class SourceConfirmedTextQuery extends Query { private float freq; private RuntimePhraseScorer( - Weight weight, DocIdSetIterator approximation, LeafSimScorer scorer, CheckedIntFunction, IOException> valueFetcher, String field, Query query ) { - super(weight); this.scorer = scorer; this.valueFetcher = valueFetcher; this.field = field; diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java index 922b92263d71..1eb6083cfe45 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/MatchOnlyTextFieldMapperTests.java @@ -89,8 +89,8 @@ public class MatchOnlyTextFieldMapperTests extends MapperTestCase { SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader)); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox"); TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1); - assertThat(docs.totalHits.value, equalTo(1L)); - assertThat(docs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(docs.totalHits.value(), equalTo(1L)); + assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(docs.scoreDocs[0].doc, equalTo(0)); } } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java index 84139409e8bc..a49e0c2a3e38 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceConfirmedTextQueryTests.java @@ -61,7 
+61,7 @@ public class SourceConfirmedTextQueryTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = context -> docID -> { sourceFetchCount.incrementAndGet(); - return Collections.singletonList(context.reader().document(docID).get("body")); + return Collections.singletonList(context.reader().storedFields().document(docID).get("body")); }; public void testTerm() throws Exception { diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java index 0fef801b2200..2befcfb57601 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/SourceIntervalsSourceTests.java @@ -41,7 +41,7 @@ import java.util.List; public class SourceIntervalsSourceTests extends ESTestCase { private static final IOFunction, IOException>> SOURCE_FETCHER_PROVIDER = - context -> docID -> Collections.singletonList(context.reader().document(docID).get("body")); + context -> docID -> Collections.singletonList(context.reader().storedFields().document(docID).get("body")); public void testIntervals() throws IOException { final FieldType ft = new FieldType(TextField.TYPE_STORED); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java index ad8e252e3fd6..9c0e5ce071dc 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/aggregations/ChildrenIT.java @@ -115,7 +115,7 @@ public class ChildrenIT extends AbstractParentChildTestCase { logger.info("bucket={}", bucket.getKey()); Children childrenBucket = bucket.getAggregations().get("to_comment"); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - logger.info("total_hits={}", topHits.getHits().getTotalHits().value); + logger.info("total_hits={}", topHits.getHits().getTotalHits().value()); for (SearchHit searchHit : topHits.getHits()) { logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId()); } @@ -129,7 +129,7 @@ public class ChildrenIT extends AbstractParentChildTestCase { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(2L)); TopHits topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(2L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("e")); assertThat(topHits.getHits().getAt(1).getId(), equalTo("f")); @@ -141,7 +141,7 @@ public class ChildrenIT extends AbstractParentChildTestCase { assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); categoryBucket = categoryTerms.getBucketByKey("c"); @@ -152,7 +152,7 @@ public class ChildrenIT extends AbstractParentChildTestCase { 
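
The test source fetchers above (SourceConfirmedTextQueryTests, SourceIntervalsSourceTests) now read stored documents through the StoredFields API instead of calling document(int) on the reader directly. A minimal sketch of that access pattern, with illustrative names, not part of the patch:

import org.apache.lucene.document.Document;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.StoredFields;

import java.io.IOException;

class StoredFieldsSketch {
    // Lucene 10: stored fields are fetched via LeafReader#storedFields()
    // rather than reading documents off the LeafReader itself.
    static String bodyField(LeafReader reader, int docID) throws IOException {
        StoredFields storedFields = reader.storedFields();
        Document doc = storedFields.document(docID);
        return doc.get("body");
    }
}
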
assertThat(childrenBucket.getName(), equalTo("to_comment")); assertThat(childrenBucket.getDocCount(), equalTo(1L)); topHits = childrenBucket.getAggregations().get("top_comments"); - assertThat(topHits.getHits().getTotalHits().value, equalTo(1L)); + assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L)); assertThat(topHits.getHits().getAt(0).getId(), equalTo("f")); } ); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java index 872165014f5a..cce0ef06cbf6 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/ChildQuerySearchIT.java @@ -107,7 +107,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -117,7 +117,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -127,7 +127,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -135,7 +135,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); } ); @@ -143,7 +143,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("gc1")); } ); @@ -161,7 +161,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } ); @@ -182,7 +182,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // TEST FETCHING _parent from child assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> { - assertThat(response.getHits().getTotalHits().value, 
equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c1")); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -195,7 +195,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2"))); assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child")); assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1")); @@ -208,7 +208,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // HAS CHILD assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> { assertHitCount(response, 1L); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); }); @@ -307,8 +307,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ).setSize(numChildDocsPerParent), response -> { Set childIds = parentToChildrenEntry.getValue(); - assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size())); - for (int i = 0; i < response.getHits().getTotalHits().value; i++) { + assertThat(response.getHits().getTotalHits().value(), equalTo((long) childIds.size())); + for (int i = 0; i < response.getHits().getTotalHits().value(); i++) { assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true)); assertThat(response.getHits().getAt(i).getScore(), is(1.0f)); } @@ -341,7 +341,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -349,7 +349,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -357,7 +357,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -367,7 +367,7 @@ public class ChildQuerySearchIT 
extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -375,7 +375,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); } ); @@ -383,7 +383,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); } @@ -426,7 +426,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1"))); @@ -458,7 +458,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -472,7 +472,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\"")); } @@ -647,7 +647,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -667,7 +667,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -687,7 +687,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -707,7 +707,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("16")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("17")); @@ -768,7 +768,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); } ); @@ -778,7 +778,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); } ); @@ -801,7 +801,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits(); assertThat(searchHits.length, equalTo(1)); @@ -888,7 +888,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { .addSort("p_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("p000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("p001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("p002")); @@ -903,7 +903,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { .addSort("c_field", SortOrder.ASC) .setSize(5), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(500L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(500L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("c000")); assertThat(response.getHits().getHits()[1].getId(), equalTo("c001")); assertThat(response.getHits().getHits()[2].getId(), equalTo("c002")); @@ -932,7 +932,7 @@ public class ChildQuerySearchIT extends 
ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -943,7 +943,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("c3")); assertThat(response.getHits().getAt(1).getId(), equalTo("c4")); } @@ -961,7 +961,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p1")); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\"")); } @@ -972,7 +972,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4"))); } @@ -996,7 +996,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("p2")); assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f)); } @@ -1411,7 +1411,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { 10, (respNum, response) -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(10L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(10L)); } ); } @@ -1469,7 +1469,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Score mode = NONE assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1479,7 +1479,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("4")); @@ -1487,7 +1487,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1495,7 +1495,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1505,7 +1505,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1515,7 +1515,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("2")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1523,7 +1523,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f)); }); @@ -1533,7 +1533,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Score mode = SUM assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1543,7 +1543,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); 
assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1551,7 +1551,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); }); @@ -1559,7 +1559,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1569,7 +1569,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1579,7 +1579,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1587,7 +1587,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1597,7 +1597,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Score mode = MAX assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1607,7 +1607,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1615,7 
+1615,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); }); @@ -1623,7 +1623,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1633,7 +1633,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1643,7 +1643,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1651,7 +1651,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1661,7 +1661,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { // Score mode = AVG assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1671,7 +1671,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1679,7 +1679,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); }); @@ -1687,7 +1687,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1697,7 +1697,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("4")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -1707,7 +1707,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); assertThat(response.getHits().getHits()[1].getId(), equalTo("2")); @@ -1715,7 +1715,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase { }); assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits()[0].getId(), equalTo("3")); assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f)); }); diff --git a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java index 0ae10b297f70..6d6072b2992c 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/elasticsearch/join/query/InnerHitsIT.java @@ -128,7 +128,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getAt(0).getId(), equalTo("c1")); assertThat(innerHits.getAt(1).getId(), equalTo("c2")); @@ -148,7 +148,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getAt(0).getId(), equalTo("c4")); 
assertThat(innerHits.getAt(1).getId(), equalTo("c5")); @@ -280,7 +280,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child1InnerObjects[parent])); for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child); @@ -290,7 +290,7 @@ public class InnerHitsIT extends ParentChildTestCase { offset1 += child1InnerObjects[parent]; inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent])); + assertThat(inner.getTotalHits().value(), equalTo((long) child2InnerObjects[parent])); for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) { SearchHit innerHit = inner.getAt(child); String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child); @@ -347,12 +347,12 @@ public class InnerHitsIT extends ParentChildTestCase { SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.getId(), equalTo("3")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1")); searchHit = response.getHits().getAt(1); assertThat(searchHit.getId(), equalTo("4")); - assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L)); + assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L)); assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2")); } ); @@ -394,11 +394,11 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("3")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("5")); } ); @@ -417,11 +417,11 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("4")); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("6")); } ); @@ -482,34 +482,34 @@ public class InnerHitsIT extends ParentChildTestCase { assertThat(response.getHits().getAt(0).getId(), equalTo("duke")); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls"); - assertThat(innerHits.getTotalHits().value, equalTo(4L)); + 
assertThat(innerHits.getTotalHits().value(), equalTo(4L)); assertThat(innerHits.getAt(0).getId(), equalTo("earl1")); assertThat(innerHits.getAt(1).getId(), equalTo("earl2")); assertThat(innerHits.getAt(2).getId(), equalTo("earl3")); assertThat(innerHits.getAt(3).getId(), equalTo("earl4")); SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1")); innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2")); innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3")); innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4")); innerHits = response.getHits().getAt(0).getInnerHits().get("princes"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getAt(0).getId(), equalTo("prince")); innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings"); - assertThat(innerInnerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerInnerHits.getAt(0).getId(), equalTo("king")); } ); @@ -532,12 +532,12 @@ public class InnerHitsIT extends ParentChildTestCase { response -> { assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); - assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1")); } @@ -549,7 +549,7 @@ public class InnerHitsIT extends ParentChildTestCase { assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2")); }); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java index 258cbe743d7d..60412179807a 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/aggregations/ParentJoinAggregator.java @@ -102,7 +102,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements public void collect(int docId, long owningBucketOrd) throws IOException { if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) { int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; collectionStrategy.add(owningBucketOrd, globalOrdinal); } } @@ -134,11 +134,6 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements public float score() { return 1f; } - - @Override - public int docID() { - return childDocsIter.docID(); - } }); final Bits liveDocs = ctx.reader().getLiveDocs(); @@ -150,7 +145,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements continue; } int globalOrdinal = (int) globalOrdinals.nextOrd(); - assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS; + assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1; /* * Check if we contain every ordinal. 
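The ParentJoinAggregator hunk above reflects two smaller Lucene 10 API moves: SortedSetDocValues no longer exposes a NO_MORE_ORDS sentinel, so callers read exactly docValueCount() ordinals per document (the assert in the diff is the single-valued case of that rule), and Scorable no longer declares docID(), which is why the anonymous scorable's override is simply dropped. A sketch of the general iteration pattern, assuming a SortedSetDocValues named globalOrdinals as in the surrounding code:

    // Sketch only; assumes org.apache.lucene.index.SortedSetDocValues
    if (globalOrdinals.advanceExact(docId)) {
        int count = globalOrdinals.docValueCount();   // Lucene 10: explicit per-document count
        for (int i = 0; i < count; i++) {
            long ord = globalOrdinals.nextOrd();      // no NO_MORE_ORDS sentinel to check anymore
            // ... consume ord
        }
    }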
It's almost certainly be * faster to replay all the matching ordinals and filter them down diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 9ecf4ed821e2..6b00e94431be 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -20,8 +20,8 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -137,12 +137,12 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder { TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } for (LeafReaderContext ctx : this.context.searcher().getIndexReader().leaves()) { diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java index 03a1677e60f4..707fcc822665 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ChildrenToParentAggregatorTests.java @@ -107,7 +107,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase { // verify for each children for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId("child0_" + parent))), indexReader, aggregation -> { assertEquals( "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation, 1, diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java index 91ec0e3c6769..ca90b0e588b1 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/aggregations/ParentToChildrenAggregatorTests.java @@ -104,7 +104,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase { 
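The ParentChildInnerHitContextBuilder hunk above switches from the removed TopFieldCollector.create and TopScoreDocCollector.create factories to the Lucene 10 collector managers. A sketch of the single-threaded shape mirrored from that change; the null "after" document, the Integer.MAX_VALUE total-hits threshold, and the trailing false (no concurrent collection) are copied from the diff rather than independently verified:

    // Sketch only; assumes org.apache.lucene.search.Sort, TopDocsCollector,
    // TopFieldCollectorManager and TopScoreDocCollectorManager
    TopDocsCollector<?> topDocsCollector;
    if (sort != null) {
        topDocsCollector = new TopFieldCollectorManager(sort, topN, null, Integer.MAX_VALUE, false).newCollector();
    } else {
        topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector();
    }

In a concurrent search the manager would instead be handed to IndexSearcher.search(Query, CollectorManager) so that each slice gets its own collector.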
}); for (String parent : expectedParentChildRelations.keySet()) { - testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> { + testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(parent))), indexReader, child -> { assertEquals((long) expectedParentChildRelations.get(parent).v1(), child.getDocCount()); assertEquals( expectedParentChildRelations.get(parent).v2(), diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index d4fe49ec8c77..9244f815cd95 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -54,6 +54,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; @@ -341,13 +342,13 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase percolatorQueries = queryStore.getQueries(leafReaderContext); + ScorerSupplier verifiedDocsScorer; if (scoreMode.needsScores()) { - return new BaseScorer(this, approximation) { - - float score; - - @Override - boolean matchDocId(int docId) throws IOException { - Query query = percolatorQueries.apply(docId); - if (query != null) { - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST) - .add(nonNestedDocsFilter, Occur.FILTER) - .build(); - } - TopDocs topDocs = percolatorIndexSearcher.search(query, 1); - if (topDocs.scoreDocs.length > 0) { - score = topDocs.scoreDocs[0].score; - return true; - } else { - return false; - } - } else { - return false; - } - } - - @Override - public float score() { - return score; - } - }; + verifiedDocsScorer = null; } else { - ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); - Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); - return new BaseScorer(this, approximation) { - - @Override - public float score() throws IOException { - return 0f; - } - - boolean matchDocId(int docId) throws IOException { - // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. - // If docId also appears in the verifiedDocsBits then that means during indexing - // we were able to extract all query terms and for this candidate match - // and we determined based on the nature of the query that it is safe to skip - // the MemoryIndex verification. 
- if (verifiedDocsBits.get(docId)) { - return true; - } - Query query = percolatorQueries.apply(docId); - if (query == null) { - return false; - } - if (nonNestedDocsFilter != null) { - query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build(); - } - return Lucene.exists(percolatorIndexSearcher, query); - } - }; + verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext); } + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + final Scorer approximation = approximationSupplier.get(leadCost); + final CheckedFunction percolatorQueries = queryStore.getQueries(leafReaderContext); + if (scoreMode.needsScores()) { + return new BaseScorer(approximation) { + + float score; + + @Override + boolean matchDocId(int docId) throws IOException { + Query query = percolatorQueries.apply(docId); + if (query != null) { + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + TopDocs topDocs = percolatorIndexSearcher.search(query, 1); + if (topDocs.scoreDocs.length > 0) { + score = topDocs.scoreDocs[0].score; + return true; + } else { + return false; + } + } else { + return false; + } + } + + @Override + public float score() { + return score; + } + }; + } else { + Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer); + return new BaseScorer(approximation) { + + @Override + public float score() throws IOException { + return 0f; + } + + boolean matchDocId(int docId) throws IOException { + // We use the verifiedDocsBits to skip the expensive MemoryIndex verification. + // If docId also appears in the verifiedDocsBits then that means during indexing + // we were able to extract all query terms and for this candidate match + // and we determined based on the nature of the query that it is safe to skip + // the MemoryIndex verification. 
+ if (verifiedDocsBits.get(docId)) { + return true; + } + Query query = percolatorQueries.apply(docId); + if (query == null) { + return false; + } + if (nonNestedDocsFilter != null) { + query = new BooleanQuery.Builder().add(query, Occur.MUST) + .add(nonNestedDocsFilter, Occur.FILTER) + .build(); + } + return Lucene.exists(percolatorIndexSearcher, query); + } + }; + } + } + + @Override + public long cost() { + return approximationSupplier.cost(); + } + }; } @Override @@ -265,8 +284,7 @@ final class PercolateQuery extends Query implements Accountable { final Scorer approximation; - BaseScorer(Weight weight, Scorer approximation) { - super(weight); + BaseScorer(Scorer approximation) { this.approximation = approximation; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 409b6fd70c3c..d6422efdfed2 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -294,7 +294,7 @@ public class PercolatorFieldMapper extends FieldMapper { List extractedTerms = t.v1(); Map> encodedPointValuesByField = t.v2(); // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause - boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery + boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher .getMaxClauseCount(); List subQueries = new ArrayList<>(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index c36374685668..8413b564c204 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -91,7 +91,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase { query = percolatorIndexSearcher.rewrite(query); int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc(); TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC)); - if (topDocs.totalHits.value == 0) { + if (topDocs.totalHits.value() == 0) { // This hit didn't match with a percolate query, // likely to happen when percolating multiple documents continue; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index da4b10956dcf..0e9aa6de3a0c 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -8,7 +8,6 @@ */ package org.elasticsearch.percolator; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanTermQuery; @@ -26,12 +25,15 @@ import org.apache.lucene.search.SynonymQuery; import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefIterator; 
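The PercolateQuery rewrite above follows the Lucene 10 Weight contract: per-leaf scorers are produced through scorerSupplier(LeafReaderContext), whose ScorerSupplier builds the Scorer lazily in get(leadCost) and reports an estimate in cost(), and Scorer subclasses no longer take a Weight constructor argument, hence the super(weight) removal in BaseScorer. A minimal supplier shape inside a Weight implementation; buildScorer is a hypothetical helper standing in for the percolator-specific logic:

    // Sketch only; assumes org.apache.lucene.index.LeafReaderContext,
    // org.apache.lucene.search.Scorer and org.apache.lucene.search.ScorerSupplier
    @Override
    public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
        return new ScorerSupplier() {
            @Override
            public Scorer get(long leadCost) throws IOException {
                return buildScorer(context, leadCost);   // hypothetical helper; scorer built on demand
            }

            @Override
            public long cost() {
                return context.reader().maxDoc();        // cheap upper bound used as the cost estimate
            }
        };
    }

Where the scorer can be built eagerly, the DefaultScorerSupplier wrapper used by the CandidateQueryTests hunk near the end of this section does the same job in one line.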
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.automaton.ByteRunAutomaton; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.query.DateRangeIncludingNowQuery; import org.elasticsearch.lucene.queries.BlendedTermQuery; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -162,7 +164,7 @@ final class QueryAnalyzer { int minimumShouldMatchValue = 0; if (parent instanceof BooleanQuery bq) { if (bq.getMinimumNumberShouldMatch() == 0 - && bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) { + && bq.clauses().stream().anyMatch(c -> c.occur() == Occur.MUST || c.occur() == Occur.FILTER)) { return QueryVisitor.EMPTY_VISITOR; } minimumShouldMatchValue = bq.getMinimumNumberShouldMatch(); @@ -198,11 +200,15 @@ final class QueryAnalyzer { @Override public void consumeTermsMatching(Query query, String field, Supplier automaton) { if (query instanceof TermInSetQuery q) { - PrefixCodedTerms.TermIterator ti = q.getTermData().iterator(); + BytesRefIterator bytesRefIterator = q.getBytesRefIterator(); BytesRef term; Set qe = new HashSet<>(); - while ((term = ti.next()) != null) { - qe.add(new QueryExtraction(new Term(field, term))); + try { + while ((term = bytesRefIterator.next()) != null) { + qe.add(new QueryExtraction(new Term(field, term))); + } + } catch (IOException e) { + throw new UncheckedIOException(e); } this.terms.add(new Result(true, qe, 1)); } else { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 31e893ace72f..ff321303b56c 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.index.MultiTerms; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.memory.MemoryIndex; @@ -56,6 +57,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.TermInSetQuery; @@ -246,15 +248,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { queryFunctions.add( () -> new TermInSetQuery( field1, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); queryFunctions.add( () -> new TermInSetQuery( field2, - new BytesRef(randomFrom(stringContent.get(field1))), - new BytesRef(randomFrom(stringContent.get(field1))) + List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1)))) ) ); // many iterations with boolean queries, which are the most complex queries to deal with when nested @@ -647,7 +647,7 @@ public class 
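Two related TermInSetQuery changes recur above: the varargs BytesRef constructor is replaced by one taking a Collection, and the packed getTermData() view gives way to a BytesRefIterator whose next() can throw IOException, which is what the new UncheckedIOException handling in QueryAnalyzer is about. A sketch of both, with the field name and term values as placeholders:

    // Sketch only; assumes org.apache.lucene.search.TermInSetQuery,
    // org.apache.lucene.util.BytesRef and org.apache.lucene.util.BytesRefIterator
    TermInSetQuery query = new TermInSetQuery("field", List.of(new BytesRef("a"), new BytesRef("b")));

    BytesRefIterator termIterator = query.getBytesRefIterator();
    BytesRef term;
    try {
        while ((term = termIterator.next()) != null) {
            // ... consume each term, e.g. new Term("field", term)
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }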
CandidateQueryTests extends ESSingleNodeTestCase { v ); TopDocs topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); @@ -655,7 +655,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); @@ -663,7 +663,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(2, topDocs.scoreDocs[0].doc); @@ -671,7 +671,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3, topDocs.scoreDocs[0].doc); @@ -679,7 +679,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(4, topDocs.scoreDocs[0].doc); @@ -690,7 +690,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v); topDocs = shardSearcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(5, topDocs.scoreDocs[0].doc); } @@ -836,14 +836,14 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); assertEquals(4, topDocs.scoreDocs[2].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(3L, topDocs.totalHits.value); + assertEquals(3L, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); @@ -875,7 +875,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { 
IndexVersion.current() ); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -931,15 +931,15 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); @@ -947,10 +947,10 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { } // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: - int origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try (Directory directory = new ByteBuffersDirectory()) { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { Document document = new Document(); for (int i = 0; i < maxClauseCount; i++) { @@ -970,22 +970,22 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { v ); BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); - assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); + assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class)); TopDocs topDocs = shardSearcher.search(query, 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(2, topDocs.scoreDocs.length); assertEquals(1, topDocs.scoreDocs[0].doc); assertEquals(2, topDocs.scoreDocs[1].doc); } } finally { - BooleanQuery.setMaxClauseCount(origMaxClauseCount); + IndexSearcher.setMaxClauseCount(origMaxClauseCount); } } @@ -1032,7 +1032,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(2L, topDocs.totalHits.value); + assertEquals(2L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); assertEquals(1, topDocs.scoreDocs[1].doc); } @@ -1066,7 +1066,7 @@ public class CandidateQueryTests extends 
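The CandidateQueryTests hunk above picks up two more Lucene 10 moves visible throughout the percolator changes: the boolean clause limit now lives on IndexSearcher (getMaxClauseCount and setMaxClauseCount), and BooleanClause is a record, so clauses are inspected through query() and occur() instead of getQuery() and getOccur(). A small combined sketch; the booleanQuery variable is illustrative:

    // Sketch only; assumes org.apache.lucene.search.BooleanClause, BooleanQuery,
    // IndexSearcher and Query
    int maxClauses = IndexSearcher.getMaxClauseCount();       // was: BooleanQuery.getMaxClauseCount()
    for (BooleanClause clause : booleanQuery.clauses()) {
        Query subQuery = clause.query();                      // was: clause.getQuery()
        BooleanClause.Occur occur = clause.occur();           // was: clause.getOccur()
        // ...
    }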
ESSingleNodeTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1074,7 +1074,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); memoryIndex = new MemoryIndex(); @@ -1082,7 +1082,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { percolateSearcher = memoryIndex.createSearcher(); query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1117,7 +1117,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v); TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC)); - assertEquals(1L, topDocs.totalHits.value); + assertEquals(1L, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs[0].doc); } @@ -1141,7 +1141,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100); try { - assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value)); + assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value())); assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length)); for (int j = 0; j < topDocs.scoreDocs.length; j++) { assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc)); @@ -1164,12 +1164,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc); logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score); } + StoredFields storedFields = shardSearcher.storedFields(); for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) { logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc); logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score); // Additional stored information that is useful when debugging: - String queryToString = shardSearcher.doc(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); + String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string"); logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString); TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), fieldType.queryTermsField.name()).iterator(); @@ -1289,7 +1290,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { } @Override - 
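The StoredFields lookup introduced above replaces the removed IndexSearcher#doc(int). A small self-contained sketch of that access pattern, with a hypothetical helper name and field argument:

import java.io.IOException;

import org.apache.lucene.index.StoredFields;
import org.apache.lucene.search.IndexSearcher;

final class StoredFieldAccess {
    /** Loads a single stored field value for one document. */
    static String storedValue(IndexSearcher searcher, int docId, String field) throws IOException {
        // IndexSearcher#doc(int) is gone; obtain a StoredFields reader once and reuse it across lookups.
        StoredFields storedFields = searcher.storedFields();
        return storedFields.document(docId).get(field);
    }
}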
public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { float _score[] = new float[] { boost }; DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc()); CheckedFunction leaf = queryStore.getQueries(context); @@ -1313,7 +1314,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { } } }; - return new Scorer(this) { + Scorer scorer = new Scorer() { @Override public int docID() { @@ -1335,6 +1336,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { return _score[0]; } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java index 075d4d429fb3..04a8105b5fb8 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryTests.java @@ -118,7 +118,7 @@ public class PercolateQueryTests extends ESTestCase { ) ); TopDocs topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); assertThat(topDocs.scoreDocs.length, equalTo(1)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); Explanation explanation = shardSearcher.explain(query, 0); @@ -137,7 +137,7 @@ public class PercolateQueryTests extends ESTestCase { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); explanation = shardSearcher.explain(query, 1); @@ -166,7 +166,7 @@ public class PercolateQueryTests extends ESTestCase { ) ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(4L)); + assertThat(topDocs.totalHits.value(), equalTo(4L)); query = new PercolateQuery( "_name", @@ -178,7 +178,7 @@ public class PercolateQueryTests extends ESTestCase { new MatchNoDocsQuery("") ); topDocs = shardSearcher.search(query, 10); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(3)); explanation = shardSearcher.explain(query, 3); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 100cda66acdc..f72c68c6fd2e 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -23,6 +23,7 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.sandbox.search.CoveringQuery; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -417,10 +418,10 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { } public void testCreateCandidateQuery() throws Exception { - int 
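The Weight change above is the other recurring Lucene 10 pattern in this test: scorer(LeafReaderContext) is no longer the override point, Scorer subclasses lose the Weight constructor argument, and an eagerly built Scorer is handed back through the DefaultScorerSupplier wrapper. A condensed, standalone sketch of that shape, assuming a constant-score weight; the class name and scoring logic are illustrative, not the test's actual percolate query.

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;

/** Hypothetical Weight that matches all documents of a segment with a constant score. */
final class AllDocsWeight extends ConstantScoreWeight {

    AllDocsWeight(Query query, float score) {
        super(query, score);
    }

    @Override
    public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
        final float constantScore = score();
        final DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc());
        // Lucene 10: Scorer no longer takes the owning Weight in its constructor.
        Scorer scorer = new Scorer() {
            @Override
            public int docID() {
                return allDocs.docID();
            }

            @Override
            public DocIdSetIterator iterator() {
                return allDocs;
            }

            @Override
            public float getMaxScore(int upTo) {
                return constantScore;
            }

            @Override
            public float score() {
                return constantScore;
            }
        };
        // Weight#scorer(LeafReaderContext) is no longer overridden; wrap the Scorer instead.
        return new DefaultScorerSupplier(scorer);
    }

    @Override
    public boolean isCacheable(LeafReaderContext ctx) {
        return true;
    }
}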
origMaxClauseCount = BooleanQuery.getMaxClauseCount(); + int origMaxClauseCount = IndexSearcher.getMaxClauseCount(); try { final int maxClauseCount = 100; - BooleanQuery.setMaxClauseCount(maxClauseCount); + IndexSearcher.setMaxClauseCount(maxClauseCount); addQueryFieldMappings(); MemoryIndex memoryIndex = new MemoryIndex(false); @@ -435,8 +436,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { Tuple t = fieldType.createCandidateQuery(indexReader); assertTrue(t.v2()); assertEquals(2, t.v1().clauses().size()); - assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class)); - assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class)); + assertThat(t.v1().clauses().get(0).query(), instanceOf(CoveringQuery.class)); + assertThat(t.v1().clauses().get(1).query(), instanceOf(TermQuery.class)); // Now push it over the edge, so that it falls back using TermInSetQuery memoryIndex.addField("field2", "value", new WhitespaceAnalyzer()); @@ -444,12 +445,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { t = fieldType.createCandidateQuery(indexReader); assertFalse(t.v2()); assertEquals(3, t.v1().clauses().size()); - TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).getQuery(); - assertEquals(maxClauseCount - 1, terms.getTermData().size()); - assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field: { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1))); diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index a9c3e09e7f4e..81427060615e 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -82,7 +82,7 @@ public class QueryAnalyzerTests extends ESTestCase { } public void testExtractQueryMetadata_termsQuery() { - TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); + TermInSetQuery termsQuery = new TermInSetQuery("_field", List.of(new BytesRef("_term1"), new BytesRef("_term2"))); Result result = analyze(termsQuery); assertThat(result.verified, is(true)); assertThat(result.minimumShouldMatch, equalTo(1)); diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java index a76ddf13e459..8b9433714124 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/index/reindex/CrossClusterReindexIT.java @@ -70,7 +70,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; 
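The hunks above bundle three related migrations: the global clause limit now lives on IndexSearcher rather than BooleanQuery, BooleanClause is a record (query() instead of getQuery()), and TermInSetQuery takes a Collection of terms rather than varargs. A minimal sketch combining them, with hypothetical field and term values:

import java.util.List;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;

final class ClauseLimitExample {
    static Query candidateQuery() {
        // Lucene 10: the clause-count limit moved from BooleanQuery to IndexSearcher.
        int origMaxClauseCount = IndexSearcher.getMaxClauseCount();
        try {
            IndexSearcher.setMaxClauseCount(100);
            BooleanQuery query = new BooleanQuery.Builder()
                // The varargs TermInSetQuery constructor is gone; pass a Collection<BytesRef>.
                .add(new TermInSetQuery("field", List.of(new BytesRef("a"), new BytesRef("b"))), Occur.SHOULD)
                .add(new TermQuery(new Term("field", "c")), Occur.SHOULD)
                .build();
            // BooleanClause is a record in Lucene 10: query() replaces getQuery().
            for (BooleanClause clause : query.clauses()) {
                assert clause.query() != null;
            }
            return query;
        } finally {
            IndexSearcher.setMaxClauseCount(origMaxClauseCount);
        }
    }
}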
})); } @@ -85,7 +85,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -114,7 +114,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } } @@ -146,7 +146,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } @@ -162,7 +162,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase { final TotalHits totalHits = SearchResponseUtils.getTotalHits( client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000) ); - return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber; + return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber; })); } diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java index 4b960e97ce0e..d046ba881b5d 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/ReindexValidator.java @@ -12,7 +12,6 @@ package org.elasticsearch.reindex; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteRequest; @@ -96,7 +95,7 @@ public class ReindexValidator { return new CharacterRunAutomaton(Automata.makeEmpty()); } Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY)); - automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (Operations.isTotal(automaton)) { throw new IllegalArgumentException( "Refusing to start because whitelist " diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java index b924f8c31111..01459e2ff61b 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java +++ 
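The ReindexValidator hunk above drops MinimizationOperations in favour of plain determinization, since Lucene 10 removed automaton minimization. A rough standalone sketch of the same idea; for brevity it builds the automaton from Lucene's RegExp rather than the Regex.simpleMatchToAutomaton helper used in the real code.

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

final class WhitelistAutomaton {
    /** Builds a run automaton for pattern matching without the removed minimization step. */
    static CharacterRunAutomaton compile(String regex) {
        Automaton automaton = new RegExp(regex).toAutomaton();
        // Lucene 10 removed MinimizationOperations; determinizing is sufficient for CharacterRunAutomaton.
        automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        return new CharacterRunAutomaton(automaton);
    }
}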
b/modules/reindex/src/main/java/org/elasticsearch/reindex/remote/RemoteResponseParsers.java @@ -97,8 +97,8 @@ final class RemoteResponseParsers { HITS_PARSER.declareField(constructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.START_OBJECT) { final TotalHits totalHits = SearchHits.parseTotalHitsFragment(p); - assert totalHits.relation == TotalHits.Relation.EQUAL_TO; - return totalHits.value; + assert totalHits.relation() == TotalHits.Relation.EQUAL_TO; + return totalHits.value(); } else { // For BWC with nodes pre 7.0 return p.longValue(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java index 593d4b41df71..6c7718608964 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextFieldMapperTests.java @@ -242,7 +242,7 @@ public class AnnotatedTextFieldMapperTests extends MapperTestCase { withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), reader -> { LeafReader leaf = reader.leaves().get(0).reader(); - Terms terms = leaf.getTermVector(0, "field"); + Terms terms = leaf.termVectors().get(0, "field"); TermsEnum iterator = terms.iterator(); BytesRef term; Set foundTerms = new HashSet<>(); diff --git a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java index 61abd64e98a9..d4c4ccfaa442 100644 --- a/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java +++ b/plugins/mapper-annotated-text/src/test/java/org/elasticsearch/index/mapper/annotatedtext/AnnotatedTextHighlighterTests.java @@ -130,7 +130,7 @@ public class AnnotatedTextHighlighterTests extends ESTestCase { } TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.collectionToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, hiliteAnalyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java index 4594e8d71c6f..b9f4943b1dab 100644 --- a/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java +++ b/plugins/store-smb/src/main/java/org/elasticsearch/index/store/smb/SmbMmapFsDirectoryFactory.java @@ -27,7 +27,6 @@ public final class SmbMmapFsDirectoryFactory extends FsDirectoryFactory { return new SmbDirectoryWrapper( setPreload( new MMapDirectory(location, lockFactory), - lockFactory, new HashSet<>(indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING)) ) ); diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java 
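The annotated-text mapper test above switches from LeafReader#getTermVector to the TermVectors reader, which is the Lucene 10 way to read per-document term vectors. A short sketch of that lookup, with an illustrative helper name:

import java.io.IOException;

import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermVectors;

final class TermVectorAccess {
    /** Reads the term vector of one field for one document. */
    static Terms termVector(LeafReader reader, int docId, String field) throws IOException {
        // LeafReader#getTermVector(int, String) is gone; go through the TermVectors reader instead.
        TermVectors termVectors = reader.termVectors();
        return termVectors.get(docId, field); // may be null if the field stores no vectors
    }
}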
b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 8570662f7b52..73f291da15ea 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -33,6 +33,7 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction; +import org.elasticsearch.search.SearchFeatures; import org.elasticsearch.test.NotEqualMessageBuilder; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -1694,6 +1695,211 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas } } + /** + * This test ensures that search results on old indices using "persian" analyzer don't change + * after we introduce Lucene 10 + */ + public void testPersianAnalyzerBWC() throws Exception { + var originalClusterLegacyPersianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyPersianAnalyzer); + final String indexName = "test_persian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "persian" + } + } + } + """; + + String query = """ + { + "query": { + "match": { + "textfield": "كتابها" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(indexName); + + assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", "كتابها") + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", "كتاب") + ) + ) + ); + refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new 
Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + + /** + * This test ensures that search results on old indices using "romanain" analyzer don't change + * after we introduce Lucene 10 + */ + public void testRomanianAnalyzerBWC() throws Exception { + var originalClusterLegacyRomanianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false; + assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyRomanianAnalyzer); + final String indexName = "test_romanian_stemmer"; + Settings idxSettings = indexSettings(1, 1).build(); + String cedillaForm = "absenţa"; + String commaForm = "absența"; + + String mapping = """ + { + "properties": { + "textfield" : { + "type": "text", + "analyzer": "romanian" + } + } + } + """; + + // query that uses the cedilla form of "t" + String query = """ + { + "query": { + "match": { + "textfield": "absenţa" + } + } + } + """; + + if (isRunningAgainstOldCluster()) { + createIndex(client(), indexName, idxSettings, mapping); + ensureGreen(indexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + indexName + "/" + "_doc/2", + // this doc uses the comma form + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + refresh(indexName); + + assertNumHits(indexName, 2, 1); + + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + } else { + // old index should still only return one doc + Request searchRequest = new Request("POST", "/" + indexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(1, entityAsMap(client().performRequest(searchRequest))); + + String newIndexName = indexName + "_new"; + createIndex(client(), newIndexName, idxSettings, mapping); + ensureGreen(newIndexName); + + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/1", + (builder, params) -> builder.field("textfield", cedillaForm) + ) + ) + ); + assertOK( + client().performRequest( + newXContentRequest( + HttpMethod.POST, + "/" + newIndexName + "/" + "_doc/2", + (builder, params) -> builder.field("textfield", commaForm) + ) + ) + ); + refresh(newIndexName); + + searchRequest = new Request("POST", "/" + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(2, entityAsMap(client().performRequest(searchRequest))); + + // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index + searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search"); + searchRequest.setJsonEntity(query); + assertTotalHits(3, entityAsMap(client().performRequest(searchRequest))); + } + } + public void testForbidDisableSoftDeletesOnRestore() throws Exception { final String snapshot = "snapshot-" + index; if (isRunningAgainstOldCluster()) { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index a742e83255bb..7525ff2dc12d 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -54,7 +54,9 @@ tasks.named("precommit").configure 
{ dependsOn 'enforceYamlTestConvention' } -tasks.named("yamlRestCompatTestTransform").configure({task -> - task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") - task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") +tasks.named("yamlRestCompatTestTransform").configure ({ task -> + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling") + task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count") + task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility") + task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility") }) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index dc79961ae78c..81ca84a06f81 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -212,7 +212,6 @@ dfs knn vector profiling: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 } @@ -235,6 +234,47 @@ dfs knn vector profiling: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } +--- +dfs knn vector profiling description: + - requires: + cluster_features: ["lucene_10_upgrade"] + reason: "the profile description changed with Lucene 10" + - do: + indices.create: + index: images + body: + settings: + index.number_of_shards: 1 + mappings: + properties: + image: + type: "dense_vector" + dims: 3 + index: true + similarity: "l2_norm" + + - do: + index: + index: images + id: "1" + refresh: true + body: + image: [1, 5, -20] + + - do: + search: + index: images + body: + profile: true + knn: + field: "image" + query_vector: [-5, 9, -12] + k: 1 + num_candidates: 100 + + - match: { hits.total.value: 1 } + - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" } + --- dfs knn vector profiling with vector_operations_count: - requires: @@ -276,7 +316,6 @@ dfs knn vector profiling with vector_operations_count: - match: { hits.total.value: 1 } - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" } - - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" } - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 } - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 } - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 } @@ -300,7 +339,6 @@ dfs knn vector profiling with vector_operations_count: - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" } - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 } - --- dfs profile for search with 
dfs_query_then_fetch: - requires: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java index c56bc201e7f8..8bedf436e369 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/IndicesRequestIT.java @@ -571,7 +571,7 @@ public class IndicesRequestIT extends ESIntegTestCase { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); @@ -601,7 +601,7 @@ public class IndicesRequestIT extends ESIntegTestCase { SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH); assertNoFailuresAndResponse( internalCluster().coordOnlyNodeClient().search(searchRequest), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)) ); clearInterceptedActions(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java index cc6329a973b3..e8160a311bed 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/CreateIndexIT.java @@ -306,8 +306,8 @@ public class CreateIndexIT extends ESIntegTestCase { prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()) .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)), expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> { - assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value); - logger.info("total: {}", expected.getHits().getTotalHits().value); + assertEquals(expected + " vs. 
" + all, expected.getHits().getTotalHits().value(), all.getHits().getTotalHits().value()); + logger.info("total: {}", expected.getHits().getTotalHits().value()); }) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java index e1bf5bce6f3a..8391ab270b1d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/create/SplitIndexIT.java @@ -253,7 +253,7 @@ public class SplitIndexIT extends ESIntegTestCase { // now, do a nested query assertNoFailuresAndResponse( prepareSearch(index).setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocs)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java index 8b8b62da98f9..2fd6ee9a1680 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessor2RetryIT.java @@ -141,11 +141,11 @@ public class BulkProcessor2RetryIT extends ESIntegTestCase { assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L)); if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 37904e9f639a..4ed19065f32f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -136,11 +136,11 @@ public class BulkProcessorRetryIT extends ESIntegTestCase { final boolean finalRejectedAfterAllRetries = rejectedAfterAllRetries; assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> { if (rejectedExecutionExpected) { - assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps)); } else if (finalRejectedAfterAllRetries) { - assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), 
lessThan(numberOfAsyncOps)); } else { - assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps)); + assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps)); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java index cde8d41b292b..4977d87d5a34 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/IncrementalBulkIT.java @@ -90,7 +90,7 @@ public class IncrementalBulkIT extends ESIntegTestCase { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) 1)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) 1)); }); assertFalse(refCounted.hasReferences()); @@ -268,7 +268,7 @@ public class IncrementalBulkIT extends ESIntegTestCase { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docs)); }); } } @@ -358,7 +358,7 @@ public class IncrementalBulkIT extends ESIntegTestCase { assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(hits.get())); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(hits.get())); }); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java index af99a0344e03..cd17c5b345c5 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/TransportSimulateBulkActionIT.java @@ -81,7 +81,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase { ); indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); @@ -138,7 +138,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase { // Now make sure nothing was actually changed: indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet(); SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); searchResponse.decRef(); ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet(); Map indexMapping = 
clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java index 274cf90ec952..f17196c3d97f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/bulk/WriteAckDelayIT.java @@ -45,9 +45,9 @@ public class WriteAckDelayIT extends ESIntegTestCase { try { logger.debug("running search"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 66323e687eef..e47925cef913 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -612,7 +612,7 @@ public class PointInTimeIT extends ESIntegTestCase { assertThat(resp.getSuccessfulShards(), equalTo(numShards - shardsRemoved)); assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); }); // create a PIT when some shards are missing @@ -637,7 +637,7 @@ public class PointInTimeIT extends ESIntegTestCase { assertThat(resp.getFailedShards(), equalTo(shardsRemoved)); assertThat(resp.pointInTimeId(), equalTo(pointInTimeResponseOneNodeDown.getPointInTimeId())); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); @@ -661,7 +661,7 @@ public class PointInTimeIT extends ESIntegTestCase { assertThat(resp.getSuccessfulShards(), equalTo(numShards)); assertThat(resp.getFailedShards(), equalTo(0)); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, greaterThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), greaterThan((long) numDocs)); }); // ensure that when using the previously created PIT, we'd see the same number of documents as before regardless of the @@ -681,7 +681,7 @@ public class PointInTimeIT extends ESIntegTestCase { } assertNotNull(resp.getHits().getTotalHits()); // we expect less documents as the newly indexed ones should not be part of the PIT - assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs)); + assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs)); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java index d1a68c68e7de..a1395f81eb09 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/action/search/TransportSearchIT.java @@ -143,7 +143,7 @@ public class TransportSearchIT extends ESIntegTestCase { randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -162,7 +162,7 @@ public class TransportSearchIT extends ESIntegTestCase { randomBoolean() ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); SearchHit[] hits = searchResponse.getHits().getHits(); assertEquals(1, hits.length); SearchHit hit = hits[0]; @@ -221,7 +221,7 @@ public class TransportSearchIT extends ESIntegTestCase { ); searchRequest.indices(""); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -241,7 +241,7 @@ public class TransportSearchIT extends ESIntegTestCase { sourceBuilder.query(rangeQuery); searchRequest.source(sourceBuilder); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex()); }); } @@ -280,7 +280,7 @@ public class TransportSearchIT extends ESIntegTestCase { ? 
originalRequest : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(1, longTerms.getBuckets().size()); @@ -296,7 +296,7 @@ public class TransportSearchIT extends ESIntegTestCase { false ); assertResponse(client().search(searchRequest), searchResponse -> { - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); InternalAggregations aggregations = searchResponse.getAggregations(); LongTerms longTerms = aggregations.get("terms"); assertEquals(2, longTerms.getBuckets().size()); @@ -432,7 +432,7 @@ public class TransportSearchIT extends ESIntegTestCase { () -> assertResponse( prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setPreFilterShardSize(randomIntBetween(1, 3)), - resp -> assertThat(resp.getHits().getTotalHits().value, equalTo(2L)) + resp -> assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)) ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java index 848c5cacda1b..b70da34c8fe3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/aliases/IndexAliasesIT.java @@ -396,7 +396,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); logger.info("--> checking filtering alias for one index"); @@ -406,7 +406,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); logger.info("--> checking filtering alias for two indices and one complete index"); @@ -416,7 +416,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for one index"); @@ -426,17 +426,17 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); assertResponse( 
prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices"); @@ -446,7 +446,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.termQuery("name", "something")), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)) ); } @@ -508,7 +508,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -517,7 +517,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L)) ); assertResponse( @@ -526,7 +526,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L)) ); assertResponse( @@ -535,7 +535,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -544,7 +544,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)) ); assertResponse( @@ -553,7 +553,7 @@ public class IndexAliasesIT extends ESIntegTestCase { ); assertResponse( prepareSearch("filter23", "filter13", "test1", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L)) ); } @@ -608,7 +608,7 @@ public class IndexAliasesIT extends ESIntegTestCase { logger.info("--> checking counts before delete"); assertResponse( 
prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); } @@ -1399,7 +1399,7 @@ public class IndexAliasesIT extends ESIntegTestCase { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java index 4e7c22f0d884..f7dae8a92c2d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/broadcast/BroadcastActionsIT.java @@ -44,7 +44,7 @@ public class BroadcastActionsIT extends ESIntegTestCase { for (int i = 0; i < 5; i++) { // test successful assertResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java index eb10877f5892..97994a38c277 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/document/DocumentActionsIT.java @@ -152,7 +152,7 @@ public class DocumentActionsIT extends ESIntegTestCase { for (int i = 0; i < 5; i++) { // test successful assertNoFailuresAndResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> { - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); @@ -164,7 +164,7 @@ public class DocumentActionsIT extends ESIntegTestCase { countResponse.getShardFailures() == null ? 
0 : countResponse.getShardFailures().length, equalTo(0) ); - assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries)); assertThat(countResponse.getFailedShards(), equalTo(0)); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java index 5da9788e3079..4d1ed9bce644 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/FinalPipelineIT.java @@ -115,7 +115,7 @@ public class FinalPipelineIT extends ESIntegTestCase { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -139,7 +139,7 @@ public class FinalPipelineIT extends ESIntegTestCase { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals(true, response.getHits().getAt(0).getSourceAsMap().get("final")); }); } @@ -163,7 +163,7 @@ public class FinalPipelineIT extends ESIntegTestCase { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } @@ -187,7 +187,7 @@ public class FinalPipelineIT extends ESIntegTestCase { .get(); assertEquals(RestStatus.CREATED, indexResponse.status()); assertResponse(prepareSearch("target"), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertTrue(response.getHits().getAt(0).getSourceAsMap().containsKey("final")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java index 7b7433e3aa4c..cb280d5577fa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/engine/MaxDocsLimitIT.java @@ -107,7 +107,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase { indicesAdmin().prepareRefresh("test").get(); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); if (randomBoolean()) { indicesAdmin().prepareFlush("test").get(); @@ -117,7 +117,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase { ensureGreen("test"); assertNoFailuresAndResponse( prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0), - response -> 
assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get())) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get())) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java index 81a0e0ede7cd..1194218c68ff 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/mapper/CopyToMapperIntegrationIT.java @@ -46,7 +46,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase { AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) recordCount)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) recordCount)); assertThat(((Terms) response.getAggregations().get("test")).getBuckets().size(), equalTo(recordCount + 1)); assertThat(((Terms) response.getAggregations().get("test_raw")).getBuckets().size(), equalTo(recordCount)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java index 03afabaae1d0..902dd911ddcd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/ExceptionRetryIT.java @@ -115,7 +115,7 @@ public class ExceptionRetryIT extends ESIntegTestCase { assertResponse( prepareSearch("index").setQuery(termQuery("_id", response.getHits().getHits()[i].getId())).setExplain(true), dupIdResponse -> { - assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L)); + assertThat(dupIdResponse.getHits().getTotalHits().value(), greaterThan(1L)); logger.info("found a duplicate id:"); for (SearchHit hit : dupIdResponse.getHits()) { logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java index 62c5f934ec8b..37fbc95d5650 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indexing/IndexActionIT.java @@ -57,9 +57,9 @@ public class IndexActionIT extends ESIntegTestCase { try { logger.debug("running search with all types"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. " @@ -77,9 +77,9 @@ public class IndexActionIT extends ESIntegTestCase { try { logger.debug("running search with a specific type"); assertResponse(prepareSearch("test"), response -> { - if (response.getHits().getTotalHits().value != numOfDocs) { + if (response.getHits().getTotalHits().value() != numOfDocs) { final String message = "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numOfDocs + " was expected. 
" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 7db810fc70ac..52492ba7ce65 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -149,7 +149,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 5); @@ -161,7 +161,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); @@ -174,7 +174,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 6, 9); @@ -217,7 +217,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 0, 1); @@ -229,7 +229,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 1, 1); @@ -241,7 +241,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index", 2, 1); @@ -286,7 +286,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 0, 1); @@ -299,7 +299,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); 
assertCacheState(client, "index", 1, 1); @@ -312,7 +312,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(new GlobalAggregationBuilder("global")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(9L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(9L)); } ); assertCacheState(client, "index", 2, 1); @@ -364,7 +364,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 0, 1); @@ -381,7 +381,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 1, 1); @@ -395,7 +395,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); } ); assertCacheState(client, "index-1", 2, 1); @@ -440,7 +440,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -453,7 +453,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -468,7 +468,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -483,7 +483,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 0); @@ -497,7 +497,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - 
assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 2); @@ -512,7 +512,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(7L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(7L)); } ); assertCacheState(client, "index", 0, 4); @@ -543,7 +543,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 0, 1); @@ -555,20 +555,20 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { .setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); assertCacheState(client, "index", 1, 1); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 1, 2); assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); }); assertCacheState(client, "index", 2, 2); } @@ -591,7 +591,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase { client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")), response -> { ElasticsearchAssertions.assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); } ); if (profile == false) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java index a6b168af5268..cbb0a67edcb8 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/state/CloseWhileRelocatingShardsIT.java @@ -229,7 +229,7 @@ public class CloseWhileRelocatingShardsIT extends ESIntegTestCase { for (String index : acknowledgedCloses) { assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> { - long docsCount = response.getHits().getTotalHits().value; + long docsCount = response.getHits().getTotalHits().value(); assertEquals( "Expected " + docsPerIndex.get(index) diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java index 942f86017c61..77c4f8a26f47 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RecoveryWhileUnderLoadIT.java @@ -344,7 +344,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).setTrackTotalHits(true).addSort("id", SortOrder.ASC), response -> { logSearchResponse(numberOfShards, numberOfDocs, finalI, response); - iterationHitCount[finalI] = response.getHits().getTotalHits().value; + iterationHitCount[finalI] = response.getHits().getTotalHits().value(); if (iterationHitCount[finalI] != numberOfDocs) { error[0] = true; } @@ -391,7 +391,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { boolean[] errorOccurred = new boolean[1]; for (int i = 0; i < iterations; i++) { assertResponse(prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()), response -> { - if (response.getHits().getTotalHits().value != numberOfDocs) { + if (response.getHits().getTotalHits().value() != numberOfDocs) { errorOccurred[0] = true; } }); @@ -421,7 +421,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase { logger.info( "iteration [{}] - returned documents: {} (expected {})", iteration, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), numberOfDocs ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java index fb1fabfd198e..2c56f75b051e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/recovery/RelocationIT.java @@ -240,7 +240,7 @@ public class RelocationIT extends ESIntegTestCase { prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(), response -> { var hits = response.getHits(); - if (hits.getTotalHits().value != indexer.totalIndexedDocs()) { + if (hits.getTotalHits().value() != indexer.totalIndexedDocs()) { int[] hitIds = new int[(int) indexer.totalIndexedDocs()]; for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) { hitIds[hit] = hit + 1; @@ -254,7 +254,7 @@ public class RelocationIT extends ESIntegTestCase { } set.forEach(value -> logger.error("Missing id [{}]", value)); } - assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs())); + assertThat(hits.getTotalHits().value(), equalTo(indexer.totalIndexedDocs())); logger.info("--> DONE search test round {}", idx + 1); } ); @@ -364,9 +364,9 @@ public class RelocationIT extends ESIntegTestCase { for (Client client : clients()) { assertNoFailuresAndResponse(client.prepareSearch("test").setPreference("_local").setSize(0), response -> { if (expectedCount[0] < 0) { - expectedCount[0] = response.getHits().getTotalHits().value; + expectedCount[0] = response.getHits().getTotalHits().value(); } else { - assertEquals(expectedCount[0], response.getHits().getTotalHits().value); + assertEquals(expectedCount[0], response.getHits().getTotalHits().value()); } }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java index 45dce5789b9b..199c9a9fb4c8 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/AliasRoutingIT.java @@ -296,7 +296,7 @@ public class AliasRoutingIT extends ESIntegTestCase { prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()), response -> { logger.info("--> search all on index_* should find two"); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); // Let's make sure that, even though 2 docs are available, only one is returned according to the size we set in the request // Therefore the reduce phase has taken place, which proves that the QUERY_AND_FETCH search type wasn't erroneously forced. assertThat(response.getHits().getHits().length, equalTo(1)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java index 7bccf3db1284..68bc6656cec7 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/routing/PartitionedRoutingIT.java @@ -160,7 +160,7 @@ public class PartitionedRoutingIT extends ESIntegTestCase { + "] shards for routing [" + routing + "] and got hits [" - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + "]" ); @@ -168,7 +168,7 @@ public class PartitionedRoutingIT extends ESIntegTestCase { response.getTotalShards() + " was not in " + expectedShards + " for " + index, expectedShards.contains(response.getTotalShards()) ); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); @@ -188,7 +188,7 @@ public class PartitionedRoutingIT extends ESIntegTestCase { prepareSearch().setQuery(QueryBuilders.termQuery("_routing", routing)).setIndices(index).setSize(100), response -> { assertEquals(expectedShards, response.getTotalShards()); - assertEquals(expectedDocuments, response.getHits().getTotalHits().value); + assertEquals(expectedDocuments, response.getHits().getTotalHits().value()); Set found = new HashSet<>(); response.getHits().forEach(h -> found.add(h.getId())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java index ee1aac60da9c..f63f09764621 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchTimeoutIT.java @@ -64,7 +64,7 @@ public class SearchTimeoutIT extends ESIntegTestCase { assertEquals(0, searchResponse.getFailedShards()); assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); } @@ -81,7 +81,7 @@ public class SearchTimeoutIT extends ESIntegTestCase { assertEquals(0, searchResponse.getFailedShards()); 
assertThat(searchResponse.getSuccessfulShards(), greaterThan(0)); assertEquals(searchResponse.getSuccessfulShards(), searchResponse.getTotalShards()); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertEquals(searchResponse.getHits().getHits().length, 0); StringTerms terms = searchResponse.getAggregations().get("terms"); assertEquals(1, terms.getBuckets().size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java index d023c9de87ca..4a407ae66f7a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/CombiIT.java @@ -115,7 +115,7 @@ public class CombiIT extends ESIntegTestCase { histogram("values").field("value1").interval(1).subAggregation(terms("names").field("name").collectMode(aggCollectionMode)) ), response -> { - assertThat(response.getHits().getTotalHits().value, Matchers.equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), Matchers.equalTo(0L)); Histogram values = response.getAggregations().get("values"); assertThat(values, notNullValue()); assertThat(values.getBuckets().isEmpty(), is(true)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 5a21b600cacd..1a6e1519d440 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -293,7 +293,7 @@ public class EquivalenceIT extends ESIntegTestCase { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); final Terms longTerms = response.getAggregations().get("long"); final Terms doubleTerms = response.getAggregations().get("double"); @@ -413,7 +413,7 @@ public class EquivalenceIT extends ESIntegTestCase { ), response -> { assertAllSuccessful(response); - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); } ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java index a820e6e8d174..2bd19c9d32d4 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/FiltersAggsRewriteIT.java @@ -57,7 +57,7 @@ public class FiltersAggsRewriteIT extends ESSingleNodeTestCase { metadata.put(randomAlphaOfLengthBetween(1, 20), randomAlphaOfLengthBetween(1, 20)); builder.setMetadata(metadata); assertResponse(client().prepareSearch("test").setSize(0).addAggregation(builder), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); InternalFilters filters = response.getAggregations().get("titles"); assertEquals(1, filters.getBuckets().size()); assertEquals(2, filters.getBuckets().get(0).getDocCount()); 
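Editor's note (illustrative aside, not part of the patch): every hunk in this section makes the same mechanical change required by the Lucene 10 upgrade. TotalHits no longer exposes its total as a public field, so reads of getTotalHits().value become calls to the getTotalHits().value() accessor; the assertions themselves are unchanged. A minimal sketch of the new call pattern follows; the class and helper names are invented for the example, and the accessor-style explanation is inferred from the hunks rather than stated anywhere in the patch.

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchResponse;

// Sketch only: shows the accessor-style read that these test hunks switch to.
// "TotalHitsAccessExample" and "totalHits" are invented names for illustration.
class TotalHitsAccessExample {
    static long totalHits(SearchResponse response) {
        // Lucene 9.x read a public field: response.getHits().getTotalHits().value
        // With Lucene 10 the same value is read through an accessor method.
        TotalHits total = response.getHits().getTotalHits();
        return total.value();
    }
}

Used in an assertion this reads, for example, assertThat(totalHits(response), equalTo(2L)), which is equivalent to the assertThat(response.getHits().getTotalHits().value(), equalTo(2L)) form used throughout the hunks above and below.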
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java index a8e2ca818d3f..c4560c1b0007 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java @@ -974,7 +974,7 @@ public class DateHistogramIT extends ESIntegTestCase { .subAggregation(dateHistogram("date_histo").field("value").fixedInterval(DateHistogramInterval.HOUR)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); @@ -1011,7 +1011,7 @@ public class DateHistogramIT extends ESIntegTestCase { .format("yyyy-MM-dd:HH-mm-ssZZZZZ") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -1175,7 +1175,7 @@ public class DateHistogramIT extends ESIntegTestCase { assertThat( "Expected 24 buckets for one day aggregation with hourly interval", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), equalTo(2L) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java index 778be4ee0705..21b36391781b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateHistogramOffsetIT.java @@ -78,7 +78,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { dateHistogram("date_histo").field("date").offset("2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -99,7 +99,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { dateHistogram("date_histo").field("date").offset("-2h").format(DATE_FORMAT).fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); @@ -128,7 +128,7 @@ public class DateHistogramOffsetIT extends ESIntegTestCase { .fixedInterval(DateHistogramInterval.DAY) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(24L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(24L)); Histogram histo = response.getAggregations().get("date_histo"); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java index afa3ad9d7e73..9ec459ee565e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/DateRangeIT.java @@ -578,7 +578,7 @@ public class DateRangeIT extends ESIntegTestCase { .subAggregation(dateRange("date_range").field("value").addRange("0-1", 0, 1)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -722,7 +722,7 @@ public class DateRangeIT extends ESIntegTestCase { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("00:16:40", "00:50:00").addRange("00:50:00", "01:06:40")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00:16:40-00:50:00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00:50:00-01:06:40", 3000000L, 4000000L); @@ -739,7 +739,7 @@ public class DateRangeIT extends ESIntegTestCase { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -753,7 +753,7 @@ public class DateRangeIT extends ESIntegTestCase { dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); @@ -788,7 +788,7 @@ public class DateRangeIT extends ESIntegTestCase { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1000, 3000).addRange(3000, 4000)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -799,7 +799,7 @@ public class DateRangeIT extends ESIntegTestCase { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange("1000", "3000").addRange("3000", "4000")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); 
assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -810,7 +810,7 @@ public class DateRangeIT extends ESIntegTestCase { prepareSearch(indexName).setSize(0) .addAggregation(dateRange("date_range").field("date").addRange(1.0e3, 3000.8123).addRange(3000.8123, 4.0e3)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000-3000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000-4000", 3000000L, 4000000L); @@ -827,7 +827,7 @@ public class DateRangeIT extends ESIntegTestCase { .format("HH.mm.ss") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "00.16.40-00.50.00", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "00.50.00-01.06.40", 3000000L, 4000000L); @@ -841,7 +841,7 @@ public class DateRangeIT extends ESIntegTestCase { dateRange("date_range").field("date").addRange(1000000, 3000000).addRange(3000000, 4000000).format("epoch_millis") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); List buckets = checkBuckets(response.getAggregations().get("date_range"), "date_range", 2); assertBucket(buckets.get(0), 2L, "1000000-3000000", 1000000L, 3000000L); assertBucket(buckets.get(1), 1L, "3000000-4000000", 3000000L, 4000000L); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java index 1b70b859426d..96807ed11986 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FilterIT.java @@ -159,7 +159,7 @@ public class FilterIT extends ESIntegTestCase { histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(filter("filter", matchAllQuery())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java index b030370215cd..439583de910c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/FiltersIT.java @@ -247,7 +247,7 @@ public class FiltersIT extends ESIntegTestCase { .subAggregation(filters("filters", new KeyedFilter("all", matchAllQuery()))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); 
assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -455,7 +455,7 @@ public class FiltersIT extends ESIntegTestCase { .subAggregation(filters("filters", new KeyedFilter("foo", matchAllQuery())).otherBucket(true).otherBucketKey("bar")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index 843e50a5a7e2..907f943e6842 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -413,7 +413,7 @@ public class GeoDistanceIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java index 2edd567221be..ad65e6468b81 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java @@ -915,7 +915,7 @@ public class HistogramIT extends ESIntegTestCase { .subAggregation(histogram("sub_histo").field(SINGLE_VALUED_FIELD_NAME).interval(1L)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); List buckets = histo.getBuckets(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java index 72f1b0cc56b2..5e7cffcc8ef0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/NestedIT.java @@ -351,7 +351,7 @@ public class NestedIT extends ESIntegTestCase { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(nested("nested", "nested"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java index 8b63efd92a64..1cfd6e00af7a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/bucket/RangeIT.java @@ -866,7 +866,7 @@ public class RangeIT extends ESIntegTestCase { .subAggregation(range("range").field(SINGLE_VALUED_FIELD_NAME).addRange("0-2", 0.0, 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, Matchers.notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java index 5e2a44285e8f..29bf8a8a0b45 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java @@ -97,7 +97,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(extendedStats("stats").field("value")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -130,7 +130,7 @@ public class ExtendedStatsIT extends AbstractNumericTestCase { assertResponse( prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(extendedStats("stats").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ExtendedStats stats = response.getAggregations().get("stats"); assertThat(stats, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java index 762bc5bdfaf3..ff4150556c01 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java @@ -112,7 +112,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -138,7 +138,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase { .field("value") ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java index 12ed0a5c1a8e..fe6dc7abf66a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java @@ -116,7 +116,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -143,7 +143,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase { .percentiles(0, 10, 15, 100) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 52425ae1d9f1..4c8fed2c16dd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -358,7 +358,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { prepareSearch("idx").setQuery(matchAllQuery()) .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -407,7 +407,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -467,7 +467,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -522,7 +522,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -586,7 +586,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .reduceScript(reduceScript) ), 
response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -655,7 +655,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -714,7 +714,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Global global = response.getAggregations().get("global"); assertThat(global, notNullValue()); @@ -773,7 +773,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -824,7 +824,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -869,7 +869,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { scriptedMetric("scripted").params(params).mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -928,7 +928,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .reduceScript(reduceScript) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -964,7 +964,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { .reduceScript(new Script(ScriptType.STORED, null, "reduceScript_stored", Collections.emptyMap())) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("scripted"); assertThat(aggregation, notNullValue()); @@ -1025,7 +1025,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo(numDocs)); Aggregation aggregation = response.getAggregations().get("histo"); assertThat(aggregation, notNullValue()); assertThat(aggregation, instanceOf(Histogram.class)); @@ 
-1099,7 +1099,7 @@ public class ScriptedMetricIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java index fbe70ec2a40d..1169f8bbdbf1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java @@ -56,7 +56,7 @@ public class StatsIT extends AbstractNumericTestCase { ), response -> { assertShardExecutionState(response, 0); - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java index 2a8be6b4244d..b3ad5c578e61 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/SumIT.java @@ -82,7 +82,7 @@ public class SumIT extends AbstractNumericTestCase { prepareSearch("empty_bucket_idx").setQuery(matchAllQuery()) .addAggregation(histogram("histo").field("value").interval(1L).minDocCount(0).subAggregation(sum("sum").field("value"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java index 2877f8882d6d..d6cceb201370 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java @@ -105,7 +105,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { .subAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 10, 15 }).field("value"))) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -146,7 +146,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentileRanks("percentile_ranks", new double[] { 0, 10, 15, 100 })).field("value")), response -> { - 
assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); PercentileRanks reversePercentiles = response.getAggregations().get("percentile_ranks"); assertThat(reversePercentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java index bbcf7b191fe1..b4072bcf226e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java @@ -111,7 +111,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { .subAggregation(randomCompression(percentiles("percentiles").field("value")).percentiles(10, 15)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); Histogram histo = response.getAggregations().get("histo"); assertThat(histo, notNullValue()); Histogram.Bucket bucket = histo.getBuckets().get(1); @@ -132,7 +132,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase { prepareSearch("idx_unmapped").setQuery(matchAllQuery()) .addAggregation(randomCompression(percentiles("percentiles")).field("value").percentiles(0, 10, 15, 100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); Percentiles percentiles = response.getAggregations().get("percentiles"); assertThat(percentiles, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java index 7ac8e3c7a35b..80c47d6180db 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java @@ -328,7 +328,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); higestSortValue += 10; assertThat((Long) hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); @@ -348,7 +348,7 @@ public class TopHitsIT extends ESIntegTestCase { .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group").subAggregation(topHits("hits"))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -381,7 +381,7 @@ public class TopHitsIT extends ESIntegTestCase { .setQuery(matchQuery("text", "x y z")) .addAggregation(terms("terms").executionHint(randomExecutionHint()).field("group")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(8L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(8L)); assertThat(response.getHits().getHits().length, equalTo(0)); assertThat(response.getHits().getMaxScore(), equalTo(Float.NaN)); Terms terms = response.getAggregations().get("terms"); @@ -413,7 +413,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -444,7 +444,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSourceAsMap().size(), equalTo(5)); @@ -501,7 +501,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(controlHits.getTotalHits().value)); + assertThat(hits.getTotalHits().value(), equalTo(controlHits.getTotalHits().value())); assertThat(hits.getHits().length, equalTo(controlHits.getHits().length)); for (int i = 0; i < hits.getHits().length; i++) { logger.info( @@ -543,7 +543,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); assertThat(hits.getAt(0).getSortValues()[0], equalTo(higestSortValue)); assertThat(hits.getAt(1).getSortValues()[0], equalTo(higestSortValue - 1)); @@ -578,7 +578,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(key(bucket), equalTo("b")); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("6")); @@ -586,7 +586,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(key(bucket), equalTo("c")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(3L)); + assertThat(hits.getTotalHits().value(), equalTo(3L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("9")); @@ -594,7 +594,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(key(bucket), equalTo("a")); topHits = bucket.getAggregations().get("hits"); hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); + assertThat(hits.getTotalHits().value(), equalTo(2L)); assertThat(hits.getHits().length, equalTo(1)); assertThat(hits.getAt(0).getId(), equalTo("2")); } @@ -630,7 +630,7 @@ public class TopHitsIT extends ESIntegTestCase { for (Terms.Bucket bucket : terms.getBuckets()) { TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - 
assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(1)); SearchHit hit = hits.getAt(0); @@ -682,7 +682,7 @@ public class TopHitsIT extends ESIntegTestCase { TopHits hits = response.getAggregations().get("hits"); assertThat(hits, notNullValue()); assertThat(hits.getName(), equalTo("hits")); - assertThat(hits.getHits().getTotalHits().value, equalTo(0L)); + assertThat(hits.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -744,7 +744,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(1L)); TopHits topHits = bucket.getAggregations().get("top-comments"); SearchHits searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(1)); @@ -753,7 +753,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(2L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(2)); @@ -765,7 +765,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(1L)); topHits = bucket.getAggregations().get("top-comments"); searchHits = topHits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); assertThat(extractValue("date", searchHits.getAt(0).getSourceAsMap()), equalTo(4)); @@ -789,7 +789,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(toComments.getDocCount(), equalTo(4L)); TopHits topComments = toComments.getAggregations().get("top-comments"); - assertThat(topComments.getHits().getTotalHits().value, equalTo(4L)); + assertThat(topComments.getHits().getTotalHits().value(), equalTo(4L)); assertThat(topComments.getHits().getHits().length, equalTo(4)); assertThat(topComments.getHits().getAt(0).getId(), equalTo("2")); @@ -816,7 +816,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(toReviewers.getDocCount(), equalTo(7L)); TopHits topReviewers = toReviewers.getAggregations().get("top-reviewers"); - assertThat(topReviewers.getHits().getTotalHits().value, equalTo(7L)); + assertThat(topReviewers.getHits().getTotalHits().value(), equalTo(7L)); assertThat(topReviewers.getHits().getHits().length, equalTo(7)); assertThat(topReviewers.getHits().getAt(0).getId(), equalTo("1")); @@ -899,7 +899,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(nested.getDocCount(), equalTo(4L)); SearchHits hits = ((TopHits) nested.getAggregations().get("top-comments")).getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); + assertThat(hits.getTotalHits().value(), equalTo(4L)); SearchHit 
searchHit = hits.getAt(0); assertThat(searchHit.getId(), equalTo("1")); assertThat(searchHit.getNestedIdentity().getField().string(), equalTo("comments")); @@ -960,7 +960,7 @@ public class TopHitsIT extends ESIntegTestCase { TopHits hits = nested.getAggregations().get("comments"); SearchHits searchHits = hits.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(numNestedDocs)); + assertThat(searchHits.getTotalHits().value(), equalTo(numNestedDocs)); for (int j = 0; j < 3; j++) { assertThat(searchHits.getAt(j).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(searchHits.getAt(j).getNestedIdentity().getOffset(), equalTo(0)); @@ -1064,7 +1064,7 @@ public class TopHitsIT extends ESIntegTestCase { assertThat(bucket.getDocCount(), equalTo(10L)); TopHits topHits = bucket.getAggregations().get("hits"); SearchHits hits = topHits.getHits(); - assertThat(hits.getTotalHits().value, equalTo(10L)); + assertThat(hits.getTotalHits().value(), equalTo(10L)); assertThat(hits.getHits().length, equalTo(3)); for (SearchHit hit : hits) { assertThat(hit.getSourceAsMap(), nullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java index 3dee7a8d6e92..6e00c1e5a8d9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/ValueCountIT.java @@ -67,7 +67,7 @@ public class ValueCountIT extends ESIntegTestCase { public void testUnmapped() throws Exception { assertResponse(prepareSearch("idx_unmapped").setQuery(matchAllQuery()).addAggregation(count("count").field("value")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); ValueCount valueCount = response.getAggregations().get("count"); assertThat(valueCount, notNullValue()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java index 3263be081a6f..2cd22c6a6522 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileCreatingIndexIT.java @@ -72,14 +72,14 @@ public class SearchWhileCreatingIndexIT extends ESIntegTestCase { .setPreference(preference + Integer.toString(counter++)) .setQuery(QueryBuilders.termQuery("field", "test")), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { refresh(); assertResponse( client.prepareSearch("test").setPreference(preference).setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterRefresh -> { logger.info( "hits count mismatch on any shard search failed, post explicit refresh hits are {}", - searchResponseAfterRefresh.getHits().getTotalHits().value + searchResponseAfterRefresh.getHits().getTotalHits().value() ); ensureGreen(); assertResponse( @@ -88,7 +88,7 @@ public class SearchWhileCreatingIndexIT extends ESIntegTestCase { .setQuery(QueryBuilders.termQuery("field", "test")), searchResponseAfterGreen -> logger.info( "hits count mismatch on any shard search failed, post explicit wait for green hits 
are {}", - searchResponseAfterGreen.getHits().getTotalHits().value + searchResponseAfterGreen.getHits().getTotalHits().value() ) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index cab70ba7d733..0d06856ca108 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -77,7 +77,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { try { while (stop.get() == false) { assertResponse(prepareSearch().setSize(numDocs), response -> { - if (response.getHits().getTotalHits().value != numDocs) { + if (response.getHits().getTotalHits().value() != numDocs) { // if we did not search all shards but had no serious failures that is potentially fine // if only the hit-count is wrong. this can happen if the cluster-state is behind when the // request comes in. It's a small window but a known limitation. @@ -86,7 +86,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { .allMatch(ssf -> ssf.getCause() instanceof NoShardAvailableActionException)) { nonCriticalExceptions.add( "Count is " - + response.getHits().getTotalHits().value + + response.getHits().getTotalHits().value() + " but " + numDocs + " was expected. " @@ -100,7 +100,7 @@ public class SearchWhileRelocatingIT extends ESIntegTestCase { final SearchHits sh = response.getHits(); assertThat( "Expected hits to be the same size the actual hits array", - sh.getTotalHits().value, + sh.getTotalHits().value(), equalTo((long) (sh.getHits().length)) ); }); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java index 1745ad82931b..4b59d5b9a78d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/TransportTwoNodesSearchIT.java @@ -126,7 +126,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -169,7 +169,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -208,7 +208,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -237,7 +237,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { assertNoFailuresAndResponse( client().search(new 
SearchRequest("test").source(source.from(0).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(60)); for (int i = 0; i < 60; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -248,7 +248,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { assertNoFailuresAndResponse( client().search(new SearchRequest("test").source(source.from(60).size(60)).searchType(QUERY_THEN_FETCH)), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(40)); for (int i = 0; i < 40; i++) { SearchHit hit = searchResponse.getHits().getHits()[i]; @@ -271,7 +271,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .get(); while (true) { assertNoFailures(searchResponse); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); SearchHit[] hits = searchResponse.getHits().getHits(); if (hits.length == 0) { break; // finished @@ -301,7 +301,7 @@ public class TransportTwoNodesSearchIT extends ESIntegTestCase { .aggregation(AggregationBuilders.filter("test1", termQuery("name", "test1"))); assertNoFailuresAndResponse(client().search(new SearchRequest("test").source(sourceBuilder)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(100L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(100L)); Global global = response.getAggregations().get("global"); Filter all = global.getAggregations().get("all"); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java index 223ee81e84a9..5233a0cd564e 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/ccs/CrossClusterSearchIT.java @@ -685,7 +685,7 @@ public class CrossClusterSearchIT extends AbstractMultiClustersTestCase { assertNotNull(localClusterSearchInfo); Cluster remoteClusterSearchInfo = clusters.getCluster(REMOTE_CLUSTER); assertNotNull(remoteClusterSearchInfo); - assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value, greaterThan(2L)); + assertThat(Objects.requireNonNull(response.getHits().getTotalHits()).value(), greaterThan(2L)); for (var hit : response.getHits()) { assertThat(hit.getIndex(), anyOf(equalTo("datemath-2001-01-01-14"), equalTo("remotemath-2001-01-01-14"))); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java index 2cb2e186b257..91cc344614c2 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/FetchSubPhasePluginIT.java @@ -139,7 +139,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase { hitField = new DocumentField(NAME, new ArrayList<>(1)); hitContext.hit().setDocumentField(NAME, hitField); } - Terms terms = 
hitContext.reader().getTermVector(hitContext.docId(), field); + Terms terms = hitContext.reader().termVectors().get(hitContext.docId(), field); if (terms != null) { TermsEnum te = terms.iterator(); Map tv = new HashMap<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java index 66d44a818b79..e39f8df9bad3 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java @@ -151,7 +151,7 @@ public class InnerHitsIT extends ESIntegTestCase { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(2)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -171,7 +171,7 @@ public class InnerHitsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).getShard(), notNullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment"); - assertThat(innerHits.getTotalHits().value, equalTo(3L)); + assertThat(innerHits.getTotalHits().value(), equalTo(3L)); assertThat(innerHits.getHits().length, equalTo(3)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -196,7 +196,7 @@ public class InnerHitsIT extends ESIntegTestCase { ), response -> { SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(2L)); + assertThat(innerHits.getTotalHits().value(), equalTo(2L)); assertThat(innerHits.getHits().length, equalTo(1)); HighlightField highlightField = innerHits.getAt(0).getHighlightFields().get("comments.message"); assertThat(highlightField.fragments()[0].string(), equalTo("fox eat quick")); @@ -264,7 +264,7 @@ public class InnerHitsIT extends ESIntegTestCase { SearchHit searchHit = response.getHits().getAt(i); assertThat(searchHit.getShard(), notNullValue()); SearchHits inner = searchHit.getInnerHits().get("a"); - assertThat(inner.getTotalHits().value, equalTo((long) field1InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field1InnerObjects[i])); for (int j = 0; j < field1InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field1")); @@ -273,7 +273,7 @@ public class InnerHitsIT extends ESIntegTestCase { } inner = searchHit.getInnerHits().get("b"); - assertThat(inner.getTotalHits().value, equalTo((long) field2InnerObjects[i])); + assertThat(inner.getTotalHits().value(), equalTo((long) field2InnerObjects[i])); for (int j = 0; j < field2InnerObjects[i] && j < size; j++) { SearchHit innerHit = inner.getAt(j); assertThat(innerHit.getNestedIdentity().getField().string(), equalTo("field2")); @@ -378,13 +378,13 @@ public class InnerHitsIT extends ESIntegTestCase { assertSearchHit(response, 1, hasId("1")); 
assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -409,13 +409,13 @@ public class InnerHitsIT extends ESIntegTestCase { assertSearchHit(response, 1, hasId("1")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("1")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -436,7 +436,7 @@ public class InnerHitsIT extends ESIntegTestCase { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments.remarks"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -460,13 +460,13 @@ public class InnerHitsIT extends ESIntegTestCase { assertSearchHit(response, 1, hasId("2")); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comments"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); assertThat(innerHits.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); innerHits = innerHits.getAt(0).getInnerHits().get("remark"); - assertThat(innerHits.getTotalHits().value, equalTo(1L)); + assertThat(innerHits.getTotalHits().value(), equalTo(1L)); assertThat(innerHits.getHits().length, equalTo(1)); assertThat(innerHits.getAt(0).getId(), equalTo("2")); 
assertThat(innerHits.getAt(0).getNestedIdentity().getField().string(), equalTo("comments")); @@ -538,7 +538,7 @@ public class InnerHitsIT extends ESIntegTestCase { response -> { assertHitCount(response, 1); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getId(), equalTo("1")); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), @@ -613,7 +613,7 @@ public class InnerHitsIT extends ESIntegTestCase { SearchHit parent = response.getHits().getAt(0); assertThat(parent.getId(), equalTo("1")); SearchHits inner = parent.getInnerHits().get("comments.messages"); - assertThat(inner.getTotalHits().value, equalTo(2L)); + assertThat(inner.getTotalHits().value(), equalTo(2L)); assertThat(inner.getAt(0).getSourceAsString(), equalTo("{\"message\":\"no fox\"}")); assertThat(inner.getAt(1).getSourceAsString(), equalTo("{\"message\":\"fox eat quick\"}")); } @@ -629,7 +629,7 @@ public class InnerHitsIT extends ESIntegTestCase { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(2L)); + assertThat(messages.getTotalHits().value(), equalTo(2L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(2)); @@ -651,7 +651,7 @@ public class InnerHitsIT extends ESIntegTestCase { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1)); @@ -685,7 +685,7 @@ public class InnerHitsIT extends ESIntegTestCase { SearchHit hit = response.getHits().getAt(0); assertThat(hit.getId(), equalTo("1")); SearchHits messages = hit.getInnerHits().get("comments.messages"); - assertThat(messages.getTotalHits().value, equalTo(1L)); + assertThat(messages.getTotalHits().value(), equalTo(1L)); assertThat(messages.getAt(0).getId(), equalTo("1")); assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages")); assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0)); @@ -786,22 +786,22 @@ public class InnerHitsIT extends ESIntegTestCase { ); assertNoFailuresAndResponse(prepareSearch("test").setQuery(query).setSize(numDocs).addSort("field1", SortOrder.ASC), response -> { assertAllSuccessful(response); - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getAt(0).getId(), equalTo("0")); - assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value, equalTo(2L)); + 
assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test1")); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(0).getInnerHits().get("nested1").getAt(1).getMatchedQueries()[0], equalTo("test3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); - assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(1).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test2")); for (int i = 2; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(String.valueOf(i))); - assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries().length, equalTo(1)); assertThat(response.getHits().getAt(i).getInnerHits().get("nested1").getAt(0).getMatchedQueries()[0], equalTo("test3")); } @@ -844,7 +844,7 @@ public class InnerHitsIT extends ESIntegTestCase { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -865,7 +865,7 @@ public class InnerHitsIT extends ESIntegTestCase { response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(2)); assertThat( response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().get("message"), @@ -891,7 +891,7 @@ public class InnerHitsIT extends ESIntegTestCase { ), response -> { assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().size(), equalTo(0)); } ); @@ -901,7 +901,7 @@ public class InnerHitsIT extends ESIntegTestCase { .setQuery(nestedQuery("comments", matchQuery("comments.message", "fox"), ScoreMode.None).innerHit(new InnerHitBuilder())), response -> { assertHitCount(response, 1); - 
assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits().value(), equalTo(2L)); assertFalse(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getSourceAsMap().isEmpty()); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 0ce4f34463b0..0805d0f366b0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -3340,7 +3340,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { new SearchSourceBuilder().query(query).highlighter(new HighlightBuilder().field("*").highlighterType(highlighterType)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getHighlightFields().get("text").fragments().length, equalTo(1)); } ); @@ -3412,7 +3412,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .highlighter(new HighlightBuilder().field("*")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); HighlightField highlightField = response.getHits().getAt(0).getHighlightFields().get("keyword_field"); assertThat(highlightField.fragments()[0].string(), equalTo("some text")); } @@ -3569,7 +3569,7 @@ public class HighlighterSearchIT extends ESIntegTestCase { .should(QueryBuilders.termQuery("field", "hello")) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertHighlight(response, 0, "field", 0, 1, equalTo("hello world")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java index d1eb1ab533ab..16e5e42e00c9 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -191,26 +191,26 @@ public class SearchFieldsIT extends ESIntegTestCase { indicesAdmin().prepareRefresh().get(); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field1"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); // field2 is not stored, check that it is not extracted from source. 
assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(0)); assertThat(response.getHits().getAt(0).getFields().get("field2"), nullValue()); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -218,7 +218,7 @@ public class SearchFieldsIT extends ESIntegTestCase { assertResponse( prepareSearch().setQuery(matchAllQuery()).addStoredField("*3").addStoredField("field1").addStoredField("field2"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); @@ -226,20 +226,20 @@ public class SearchFieldsIT extends ESIntegTestCase { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("field*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); assertThat(response.getHits().getAt(0).getFields().get("field1").getValue().toString(), equalTo("value1")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("f*3"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), nullValue()); 
assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -247,7 +247,7 @@ public class SearchFieldsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).getFields().get("field3").getValue().toString(), equalTo("value3")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addStoredField("*").addStoredField("_source"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap(), notNullValue()); assertThat(response.getHits().getAt(0).getFields().size(), equalTo(2)); @@ -311,7 +311,7 @@ public class SearchFieldsIT extends ESIntegTestCase { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['date'].date.millis", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertFalse(response.getHits().getAt(0).hasSource()); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); @@ -342,7 +342,7 @@ public class SearchFieldsIT extends ESIntegTestCase { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value * factor", Map.of("factor", 2.0)) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat(fields, equalTo(singleton("sNum1"))); @@ -429,7 +429,7 @@ public class SearchFieldsIT extends ESIntegTestCase { .setSize(numDocs) .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "_fields._id.value", Collections.emptyMap())), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); for (int i = 0; i < numDocs; i++) { assertThat(response.getHits().getAt(i).getId(), equalTo(Integer.toString(i))); Set fields = new HashSet<>(response.getHits().getAt(i).getFields().keySet()); @@ -638,7 +638,7 @@ public class SearchFieldsIT extends ESIntegTestCase { .addStoredField("boolean_field") .addStoredField("binary_field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -681,7 +681,7 @@ public class SearchFieldsIT extends ESIntegTestCase { .get(); assertResponse(prepareSearch("my-index").addStoredField("field1").addStoredField("_routing"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field("field1"), nullValue()); assertThat(response.getHits().getAt(0).field("_routing").getValue().toString(), equalTo("1")); }); @@ -749,7 +749,7 @@ public class SearchFieldsIT extends ESIntegTestCase { String field = "field1.field2.field3.field4"; assertResponse(prepareSearch("my-index").addStoredField(field), response -> { - assertThat(response.getHits().getTotalHits().value, 
equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).field(field).getValues().size(), equalTo(2)); assertThat(response.getHits().getAt(0).field(field).getValues().get(0).toString(), equalTo("value1")); assertThat(response.getHits().getAt(0).field(field).getValues().get(1).toString(), equalTo("value2")); @@ -866,7 +866,7 @@ public class SearchFieldsIT extends ESIntegTestCase { builder.addDocValueField("*_field"); } assertResponse(builder, response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -906,7 +906,7 @@ public class SearchFieldsIT extends ESIntegTestCase { assertThat(response.getHits().getAt(0).getFields().get("ip_field").getValues(), equalTo(List.of("::1"))); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).addDocValueField("*field"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( @@ -955,7 +955,7 @@ public class SearchFieldsIT extends ESIntegTestCase { .addDocValueField("double_field", "#.0") .addDocValueField("date_field", "epoch_millis"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); Set fields = new HashSet<>(response.getHits().getAt(0).getFields().keySet()); assertThat( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index 36e75435bb5d..76384253282d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -250,7 +250,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -276,7 +276,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -300,7 +300,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (numDummyDocs + 2))); + assertThat(sh.getTotalHits().value(), 
equalTo((long) (numDummyDocs + 2))); assertThat(sh.getAt(0).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getId(), anyOf(equalTo("1"), equalTo("2"))); assertThat(sh.getAt(1).getScore(), equalTo(sh.getAt(0).getScore())); @@ -373,7 +373,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -386,7 +386,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat(sh.getAt(1).getId(), equalTo("2")); } @@ -405,7 +405,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); assertThat(sh.getAt(0).getId(), equalTo("2")); assertThat(sh.getAt(1).getId(), equalTo("1")); } @@ -461,7 +461,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -481,7 +481,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0f, 1.e-5)); } @@ -528,7 +528,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(1.0, 1.e-5)); } @@ -546,7 +546,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -564,7 +564,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0 + 0.5, 1.e-5)); logger.info( @@ -588,7 +588,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo((2.0 + 0.5) / 2, 1.e-5)); } @@ -606,7 
+606,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(0.5, 1.e-5)); } @@ -624,7 +624,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { ), response -> { SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (1))); + assertThat(sh.getTotalHits().value(), equalTo((long) (1))); assertThat(sh.getAt(0).getId(), equalTo("1")); assertThat((double) sh.getAt(0).getScore(), closeTo(2.0, 1.e-5)); } @@ -1131,7 +1131,7 @@ public class DecayFunctionScoreIT extends ESIntegTestCase { assertResponse(client().search(new SearchRequest(new String[] {}).source(searchSource().query(baseQuery))), response -> { assertSearchHits(response, "1", "2"); SearchHits sh = response.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) (2))); + assertThat(sh.getTotalHits().value(), equalTo((long) (2))); }); List lonlat = new ArrayList<>(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java index 7fb06c0b8301..a85d133450be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/ExplainableScriptIT.java @@ -144,7 +144,7 @@ public class ExplainableScriptIT extends ESIntegTestCase { ), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(20L)); + assertThat(hits.getTotalHits().value(), equalTo(20L)); int idCounter = 19; for (SearchHit hit : hits.getHits()) { assertThat(hit.getId(), equalTo(Integer.toString(idCounter))); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index a0fe7e661020..a38c9dc91605 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -145,9 +145,9 @@ public class FunctionScoreIT extends ESIntegTestCase { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -167,9 +167,9 @@ public class FunctionScoreIT extends ESIntegTestCase { ), response -> { if (score < minScore) { - assertThat(response.getHits().getTotalHits().value, is(0L)); + assertThat(response.getHits().getTotalHits().value(), is(0L)); } else { - assertThat(response.getHits().getTotalHits().value, is(1L)); + assertThat(response.getHits().getTotalHits().value(), is(1L)); } } ); @@ -224,9 +224,9 @@ public class FunctionScoreIT extends ESIntegTestCase { protected void assertMinScoreSearchResponses(int numDocs, SearchResponse searchResponse, int numMatchingDocs) { assertNoFailures(searchResponse); - assertThat((int) searchResponse.getHits().getTotalHits().value, is(numMatchingDocs)); + 
assertThat((int) searchResponse.getHits().getTotalHits().value(), is(numMatchingDocs)); int pos = 0; - for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value; hitId--) { + for (int hitId = numDocs - 1; (numDocs - hitId) < searchResponse.getHits().getTotalHits().value(); hitId--) { assertThat(searchResponse.getHits().getAt(pos).getId(), equalTo(Integer.toString(hitId))); pos++; } @@ -242,7 +242,7 @@ public class FunctionScoreIT extends ESIntegTestCase { assertNoFailuresAndResponse( client().search(new SearchRequest(new String[] {}).source(searchSource().explain(true).query(termQuery("text", "text")))), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); termQueryScore[0] = response.getHits().getAt(0).getScore(); } ); @@ -259,7 +259,7 @@ public class FunctionScoreIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getScore(), equalTo(expectedScore)); } ); @@ -269,7 +269,7 @@ public class FunctionScoreIT extends ESIntegTestCase { searchSource().explain(true).query(functionScoreQuery(termQuery("text", "text")).boostMode(boostMode).setMinScore(2f)) ) ), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L)) ); } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index 6043688b7670..9fed4ead8c24 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -149,7 +149,7 @@ public class QueryRescorerIT extends ESIntegTestCase { 5 ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getMaxScore(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(response.getHits().getHits()[0].getId(), equalTo("1")); assertThat(response.getHits().getHits()[1].getId(), equalTo("3")); @@ -429,7 +429,7 @@ public class QueryRescorerIT extends ESIntegTestCase { assertNoFailures(rescored); SearchHits leftHits = plain.getHits(); SearchHits rightHits = rescored.getHits(); - assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); @@ -855,7 +855,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setTrackScores(true) .addRescorer(new QueryRescorerBuilder(matchAllQuery()).setRescoreQueryWeight(100.0f), 50), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(5L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(5L)); assertThat(response.getHits().getHits().length, equalTo(5)); for (SearchHit hit : response.getHits().getHits()) { assertThat(hit.getScore(), equalTo(101f)); @@ -902,7 +902,7 @@ public class QueryRescorerIT extends ESIntegTestCase { 
.addRescorer(new QueryRescorerBuilder(fieldValueScoreQuery("secondPassScore"))) .setCollapse(new CollapseBuilder("group")); assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(5L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(5L)); assertThat(resp.getHits().getHits().length, equalTo(3)); SearchHit hit1 = resp.getHits().getAt(0); @@ -982,7 +982,7 @@ public class QueryRescorerIT extends ESIntegTestCase { .setSize(Math.min(numGroups, 10)); long expectedNumHits = numHits; assertResponse(request, resp -> { - assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits)); for (int pos = 0; pos < resp.getHits().getHits().length; pos++) { SearchHit hit = resp.getHits().getAt(pos); assertThat(hit.getId(), equalTo(sortedGroups[pos].id())); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java index 7fdb31a46899..22e27d78531a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/functionscore/RandomScoreFunctionIT.java @@ -268,7 +268,7 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { .setExplain(true), response -> { assertNoFailures(response); - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); SearchHit firstHit = response.getHits().getAt(0); assertThat(firstHit.getExplanation().toString(), containsString("" + seed)); } @@ -283,12 +283,12 @@ public class RandomScoreFunctionIT extends ESIntegTestCase { prepareSearch("test").setQuery( functionScoreQuery(matchAllQuery(), randomFunction().seed(1234).setField(SeqNoFieldMapper.NAME)) ), - response -> assertEquals(0, response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); assertNoFailuresAndResponse( prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery(), randomFunction())), - response -> assertEquals(0, response.getHits().getTotalHits().value) + response -> assertEquals(0, response.getHits().getTotalHits().value()) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java index 9b574cb54a11..2fde645f0036 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/nested/SimpleNestedIT.java @@ -426,7 +426,7 @@ public class SimpleNestedIT extends ESIntegTestCase { .setExplain(true), response -> { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); Explanation explanation = response.getHits().getHits()[0].getExplanation(); assertThat(explanation.getValue(), equalTo(response.getHits().getHits()[0].getScore())); assertThat(explanation.toString(), startsWith("0.36464313 = Score based on 2 child docs in range from 0 to 1")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java 
b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java index 6993f24b895e..e6cd89c09b97 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/profile/query/QueryProfilerIT.java @@ -147,10 +147,10 @@ public class QueryProfilerIT extends ESIntegTestCase { ); } - if (vanillaResponse.getHits().getTotalHits().value != profileResponse.getHits().getTotalHits().value) { + if (vanillaResponse.getHits().getTotalHits().value() != profileResponse.getHits().getTotalHits().value()) { Set vanillaSet = new HashSet<>(Arrays.asList(vanillaResponse.getHits().getHits())); Set profileSet = new HashSet<>(Arrays.asList(profileResponse.getHits().getHits())); - if (vanillaResponse.getHits().getTotalHits().value > profileResponse.getHits().getTotalHits().value) { + if (vanillaResponse.getHits().getTotalHits().value() > profileResponse.getHits().getTotalHits().value()) { vanillaSet.removeAll(profileSet); fail("Vanilla hits were larger than profile hits. Non-overlapping elements were: " + vanillaSet.toString()); } else { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java index f263ececfdc7..26b040e2309c 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/ExistsIT.java @@ -133,7 +133,7 @@ public class ExistsIT extends ESIntegTestCase { response ), count, - response.getHits().getTotalHits().value + response.getHits().getTotalHits().value() ); } catch (AssertionError e) { for (SearchHit searchHit : allDocs.getHits()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java index 96042e198ef4..0fd2bd6f9477 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/MultiMatchQueryIT.java @@ -347,7 +347,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { ).type(MatchQueryParser.Type.PHRASE) ) ), - response -> assertThat(response.getHits().getTotalHits().value, greaterThan(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), greaterThan(1L)) ); assertSearchHitsWithoutFailures( @@ -428,8 +428,8 @@ public class MultiMatchQueryIT extends ESIntegTestCase { matchResp -> { assertThat( "field: " + field + " query: " + builder.toString(), - multiMatchResp.getHits().getTotalHits().value, - equalTo(matchResp.getHits().getTotalHits().value) + multiMatchResp.getHits().getTotalHits().value(), + equalTo(matchResp.getHits().getTotalHits().value()) ); SearchHits hits = multiMatchResp.getHits(); if (field.startsWith("missing")) { @@ -451,7 +451,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { var response = prepareSearch("test").setSize(0).setQuery(matchAllQuery()).get(); final int numDocs; try { - numDocs = (int) response.getHits().getTotalHits().value; + numDocs = (int) response.getHits().getTotalHits().value(); } finally { response.decRef(); } @@ -944,7 +944,7 @@ public class MultiMatchQueryIT extends ESIntegTestCase { assertNoFailures(right); SearchHits leftHits = left.getHits(); SearchHits rightHits = right.getHits(); - 
assertThat(leftHits.getTotalHits().value, equalTo(rightHits.getTotalHits().value)); + assertThat(leftHits.getTotalHits().value(), equalTo(rightHits.getTotalHits().value())); assertThat(leftHits.getHits().length, equalTo(rightHits.getHits().length)); SearchHit[] hits = leftHits.getHits(); SearchHit[] rHits = rightHits.getHits(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java index e25e330e072a..c8fe9498b156 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/QueryStringIT.java @@ -263,7 +263,7 @@ public class QueryStringIT extends ESIntegTestCase { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java index 45b98686e048..cffba49d5941 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -10,7 +10,7 @@ package org.elasticsearch.search.query; import org.apache.lucene.analysis.pattern.PatternReplaceCharFilter; -import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.tests.analysis.MockTokenizer; @@ -264,7 +264,7 @@ public class SearchQueryIT extends ESIntegTestCase { MatchQueryBuilder matchQuery = matchQuery("f", English.intToEnglish(between(0, num))); final long[] constantScoreTotalHits = new long[1]; assertResponse(prepareSearch("test_1").setQuery(constantScoreQuery(matchQuery)).setSize(num), response -> { - constantScoreTotalHits[0] = response.getHits().getTotalHits().value; + constantScoreTotalHits[0] = response.getHits().getTotalHits().value(); SearchHits hits = response.getHits(); for (SearchHit searchHit : hits) { assertThat(searchHit, hasScore(1.0f)); @@ -277,7 +277,7 @@ public class SearchQueryIT extends ESIntegTestCase { ).setSize(num), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(constantScoreTotalHits[0])); + assertThat(hits.getTotalHits().value(), equalTo(constantScoreTotalHits[0])); if (constantScoreTotalHits[0] > 1) { float expected = hits.getAt(0).getScore(); for (SearchHit searchHit : hits) { @@ -1693,7 +1693,7 @@ public class SearchQueryIT extends ESIntegTestCase { assertResponse( prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); first[0] = response.getHits().getAt(0).getScore(); } @@ -1704,7 +1704,7 @@ public class SearchQueryIT extends ESIntegTestCase { prepareSearch("test").setSearchType(SearchType.DFS_QUERY_THEN_FETCH) 
.setQuery(QueryBuilders.queryStringQuery("xyz").boost(100)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); float actual = response.getHits().getAt(0).getScore(); assertThat(finalI + " expected: " + first[0] + " actual: " + actual, Float.compare(first[0], actual), equalTo(0)); @@ -1917,7 +1917,9 @@ public class SearchQueryIT extends ESIntegTestCase { } /** - * Test correct handling {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexReader, MultiTermQuery)}. That rewrite method is e.g. + * Test correct handling + * {@link SpanBooleanQueryRewriteWithMaxClause#rewrite(IndexSearcher, MultiTermQuery)}. + * That rewrite method is e.g. * set for fuzzy queries with "constant_score" rewrite nested inside a `span_multi` query and would cause NPEs due to an unset * {@link AttributeSource}. */ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index 35f11eb1429b..522c20b687ca 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -609,7 +609,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { } private void assertHits(SearchHits hits, String... ids) { - assertThat(hits.getTotalHits().value, equalTo((long) ids.length)); + assertThat(hits.getTotalHits().value(), equalTo((long) ids.length)); Set hitIds = new HashSet<>(); for (SearchHit hit : hits.getHits()) { hitIds.add(hit.getId()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java index 13a7d1fa5949..97aa428822fa 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/MinimalCompoundRetrieverIT.java @@ -75,7 +75,7 @@ public class MinimalCompoundRetrieverIT extends AbstractMultiClustersTestCase { assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.RUNNING), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.PARTIAL), equalTo(0)); assertThat(clusters.getClusterStateCount(SearchResponse.Cluster.Status.FAILED), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(testClusterInfo.get("total_docs"))); + assertThat(response.getHits().getTotalHits().value(), equalTo(testClusterInfo.get("total_docs"))); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java index 43197b77b2c1..25b43a2dc946 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/retriever/RetrieverRewriteIT.java @@ -78,8 +78,8 @@ public class RetrieverRewriteIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, 
equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_0")); }); } @@ -91,8 +91,8 @@ public class RetrieverRewriteIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(1L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(1L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); }); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java index 35990fa3755b..9a7ce2c5c28a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchPreferenceIT.java @@ -123,17 +123,17 @@ public class SearchPreferenceIT extends ESIntegTestCase { assertResponse( prepareSearch().setQuery(matchAllQuery()), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("_local"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); assertResponse( prepareSearch().setQuery(matchAllQuery()).setPreference("1234"), - response -> assertThat(response.getHits().getTotalHits().value, equalTo(1L)) + response -> assertThat(response.getHits().getTotalHits().value(), equalTo(1L)) ); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java index 33b554a508e2..06ce330213af 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/routing/SearchReplicaSelectionIT.java @@ -51,15 +51,15 @@ public class SearchReplicaSelectionIT extends ESIntegTestCase { // Before we've gathered stats for all nodes, we should try each node once. 
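// Annotation (not part of the patch): the repeated .value -> .value() and .relation -> .relation()
// changes in these tests reflect Lucene 10 exposing TotalHits through accessor methods
// (TotalHits is a record in Lucene 10) rather than public fields. A minimal migration sketch,
// where `response` is a stand-in for any SearchResponse like the ones used in these tests:
TotalHits totalHits = response.getHits().getTotalHits();
long hitCount = totalHits.value();                    // was: totalHits.value
TotalHits.Relation relation = totalHits.relation();   // was: totalHits.relation
assertThat(hitCount, equalTo(1L));
assertThat(relation, equalTo(TotalHits.Relation.EQUAL_TO));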
Set nodeIds = new HashSet<>(); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertResponse(client.prepareSearch().setQuery(matchAllQuery()), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); nodeIds.add(response.getHits().getAt(0).getShard().getNodeId()); }); assertEquals(3, nodeIds.size()); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java index 2c96c27a0d12..f59be6bb7592 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scriptfilter/ScriptQuerySearchIT.java @@ -122,7 +122,7 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['binaryData'].get(0).length", emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sbinaryData").getValues().get(0), equalTo(16)); } @@ -175,7 +175,7 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(2.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); @@ -196,7 +196,7 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(3.0)); } @@ -214,7 +214,7 @@ public class ScriptQuerySearchIT extends ESIntegTestCase { new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['num1'].value", Collections.emptyMap()) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getFields().get("sNum1").getValues().get(0), equalTo(1.0)); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java index d3da4639a392..ac5738a9b67b 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/DuelScrollIT.java @@ -44,7 +44,7 @@ public class DuelScrollIT extends ESIntegTestCase { prepareSearch("index").setSearchType(context.searchType).addSort(context.sort).setSize(context.numDocs), control -> { SearchHits sh = control.getHits(); - assertThat(sh.getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(sh.getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(sh.getHits().length, equalTo(context.numDocs)); SearchResponse searchScrollResponse = prepareSearch("index").setSearchType(context.searchType) @@ -55,7 +55,7 @@ public class DuelScrollIT extends ESIntegTestCase { try { assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); assertThat(searchScrollResponse.getHits().getHits().length, equalTo(context.scrollRequestSize)); int counter = 0; @@ -69,7 +69,7 @@ public class DuelScrollIT extends ESIntegTestCase { searchScrollResponse.decRef(); searchScrollResponse = client().prepareSearchScroll(scrollId).setScroll(TimeValue.timeValueMinutes(10)).get(); assertNoFailures(searchScrollResponse); - assertThat(searchScrollResponse.getHits().getTotalHits().value, equalTo((long) context.numDocs)); + assertThat(searchScrollResponse.getHits().getTotalHits().value(), equalTo((long) context.numDocs)); if (searchScrollResponse.getHits().getHits().length == 0) { break; } @@ -241,7 +241,7 @@ public class DuelScrollIT extends ESIntegTestCase { try { while (true) { assertNoFailures(scroll); - assertEquals(control.getHits().getTotalHits().value, scroll.getHits().getTotalHits().value); + assertEquals(control.getHits().getTotalHits().value(), scroll.getHits().getTotalHits().value()); assertEquals(control.getHits().getMaxScore(), scroll.getHits().getMaxScore(), 0.01f); if (scroll.getHits().getHits().length == 0) { break; @@ -255,7 +255,7 @@ public class DuelScrollIT extends ESIntegTestCase { scroll.decRef(); scroll = client().prepareSearchScroll(scroll.getScrollId()).setScroll(TimeValue.timeValueMinutes(10)).get(); } - assertEquals(control.getHits().getTotalHits().value, scrollDocs); + assertEquals(control.getHits().getTotalHits().value(), scrollDocs); } catch (AssertionError e) { logger.info("Control:\n{}", control); logger.info("Scroll size={}, from={}:\n{}", size, scrollDocs, scroll); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 7c3dde22ce9d..7ac24b77a4b6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -89,7 +89,7 @@ public class SearchScrollIT extends ESIntegTestCase { try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for 
(SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -98,7 +98,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -107,7 +107,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -145,7 +145,7 @@ public class SearchScrollIT extends ESIntegTestCase { try { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -155,7 +155,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(3)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -166,7 +166,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -176,7 +176,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse.decRef(); searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(0)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -262,7 +262,7 @@ public class SearchScrollIT extends ESIntegTestCase { .addSort("field", SortOrder.ASC) .get(); try { - 
assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -278,7 +278,7 @@ public class SearchScrollIT extends ESIntegTestCase { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -289,7 +289,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -300,7 +300,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse2 = client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -381,7 +381,7 @@ public class SearchScrollIT extends ESIntegTestCase { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -397,7 +397,7 @@ public class SearchScrollIT extends ESIntegTestCase { .addSort("field", SortOrder.ASC) .get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -408,7 +408,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse1 = client().prepareSearchScroll(searchResponse1.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse1.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse1.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter1++)); @@ -419,7 +419,7 @@ public class SearchScrollIT extends ESIntegTestCase { searchResponse2 
= client().prepareSearchScroll(searchResponse2.getScrollId()).setScroll(TimeValue.timeValueMinutes(2)).get(); try { - assertThat(searchResponse2.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse2.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse2.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse2.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter2++)); @@ -535,7 +535,7 @@ public class SearchScrollIT extends ESIntegTestCase { prepareSearch().setQuery(matchAllQuery()).setSize(35).setScroll(TimeValue.timeValueMinutes(2)).addSort("field", SortOrder.ASC), searchResponse -> { long counter = 0; - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(100L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); for (SearchHit hit : searchResponse.getHits()) { assertThat(((Number) hit.getSortValues()[0]).longValue(), equalTo(counter++)); @@ -601,7 +601,7 @@ public class SearchScrollIT extends ESIntegTestCase { assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(1).setScroll(TimeValue.timeValueMinutes(5)), searchResponse -> { assertNotNull(searchResponse.getScrollId()); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, equalTo(1)); Exception ex = expectThrows( Exception.class, diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index 7c459f91a1ac..353858e9d697 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -150,7 +150,7 @@ public class SearchAfterIT extends ESIntegTestCase { .setQuery(matchAllQuery()) .searchAfter(new Object[] { 0, null }), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo(2L)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo(2L)); assertThat(searchResponse.getHits().getHits().length, Matchers.equalTo(1)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field1"), Matchers.equalTo(100)); assertThat(searchResponse.getHits().getHits()[0].getSourceAsMap().get("field2"), Matchers.equalTo("toto")); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index a62a042a3cab..e87c4790aa66 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -555,7 +555,7 @@ public class SimpleSearchIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch("test_count_1", "test_count_2").setTrackTotalHits(true).setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(11L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(11L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); diff --git 
a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 979cb9e8a8c4..e07999400375 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -117,7 +117,7 @@ public class SearchSliceIT extends ESIntegTestCase { setupIndex(totalDocs, numShards); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setPreference("_shards:1,4").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -129,7 +129,7 @@ public class SearchSliceIT extends ESIntegTestCase { }); assertResponse(prepareSearch("test").setQuery(matchAllQuery()).setRouting("foo", "bar").setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("test").setQuery(matchAllQuery()) @@ -147,7 +147,7 @@ public class SearchSliceIT extends ESIntegTestCase { .addAliasAction(IndicesAliasesRequest.AliasActions.add().index("test").alias("alias3").routing("baz")) ); assertResponse(prepareSearch("alias1", "alias3").setQuery(matchAllQuery()).setSize(0), sr -> { - int numDocs = (int) sr.getHits().getTotalHits().value; + int numDocs = (int) sr.getHits().getTotalHits().value(); int max = randomIntBetween(2, numShards * 3); int fetchSize = randomIntBetween(10, 100); SearchRequestBuilder request = prepareSearch("alias1", "alias3").setQuery(matchAllQuery()) @@ -166,7 +166,7 @@ public class SearchSliceIT extends ESIntegTestCase { SearchResponse searchResponse = request.slice(sliceBuilder).get(); try { totalResults += searchResponse.getHits().getHits().length; - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); int numSliceResults = searchResponse.getHits().getHits().length; String scrollId = searchResponse.getScrollId(); for (SearchHit hit : searchResponse.getHits().getHits()) { @@ -238,7 +238,7 @@ public class SearchSliceIT extends ESIntegTestCase { SearchResponse searchResponse = request.get(); try { - int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value; + int expectedSliceResults = (int) searchResponse.getHits().getTotalHits().value(); while (true) { int numHits = searchResponse.getHits().getHits().length; diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java index 3be427e37d60..d1841ebaf807 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/FieldSortIT.java @@ -286,7 +286,7 @@ public class FieldSortIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch("test").setQuery(matchAllQuery()).setSize(size).addSort("dense_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) numDocs)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo((long) numDocs)); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = denseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -307,7 +307,7 @@ public class FieldSortIT extends ESIntegTestCase { .setSize(size) .addSort("sparse_bytes", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo((long) sparseBytes.size())); + assertThat(response.getHits().getTotalHits().value(), equalTo((long) sparseBytes.size())); assertThat(response.getHits().getHits().length, equalTo(size)); Set> entrySet = sparseBytes.entrySet(); Iterator> iterator = entrySet.iterator(); @@ -818,7 +818,7 @@ public class FieldSortIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -828,7 +828,7 @@ public class FieldSortIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_last")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -838,7 +838,7 @@ public class FieldSortIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addSort(SortBuilders.fieldSort("i_value").order(SortOrder.ASC).missing("_first")), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -884,7 +884,7 @@ public class FieldSortIT extends ESIntegTestCase { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -896,7 +896,7 @@ public class FieldSortIT extends ESIntegTestCase { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("3")); assertThat(response.getHits().getAt(2).getId(), equalTo("2")); @@ -908,7 +908,7 @@ public class FieldSortIT extends ESIntegTestCase { response -> { assertThat(Arrays.toString(response.getShardFailures()), 
response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -920,7 +920,7 @@ public class FieldSortIT extends ESIntegTestCase { response -> { assertThat(Arrays.toString(response.getShardFailures()), response.getFailedShards(), equalTo(0)); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(2).getId(), equalTo("3")); @@ -1183,7 +1183,7 @@ public class FieldSortIT extends ESIntegTestCase { refresh(); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1197,7 +1197,7 @@ public class FieldSortIT extends ESIntegTestCase { }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("long_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1214,7 +1214,7 @@ public class FieldSortIT extends ESIntegTestCase { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.SUM)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1232,7 +1232,7 @@ public class FieldSortIT extends ESIntegTestCase { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.AVG)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1250,7 +1250,7 @@ public class FieldSortIT extends ESIntegTestCase { .setSize(10) .addSort(SortBuilders.fieldSort("long_values").order(SortOrder.DESC).sortMode(SortMode.MEDIAN)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1264,7 +1264,7 @@ public class FieldSortIT extends ESIntegTestCase { } ); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + 
assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1277,7 +1277,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("int_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1290,7 +1290,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1303,7 +1303,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("short_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1316,7 +1316,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1329,7 +1329,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(7)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("byte_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1342,7 +1342,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).intValue(), equalTo(3)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, 
equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1355,7 +1355,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(7f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("float_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1368,7 +1368,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).floatValue(), equalTo(3f)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1381,7 +1381,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(7d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("double_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1394,7 +1394,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(((Number) response.getHits().getAt(2).getSortValues()[0]).doubleValue(), equalTo(3d)); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.ASC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(3))); @@ -1407,7 +1407,7 @@ public class FieldSortIT extends ESIntegTestCase { assertThat(response.getHits().getAt(2).getSortValues()[0], equalTo("07")); }); assertResponse(prepareSearch().setQuery(matchAllQuery()).setSize(10).addSort("string_values", SortOrder.DESC), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), equalTo(Integer.toString(2))); @@ -1719,8 +1719,8 @@ public class FieldSortIT extends ESIntegTestCase { prepareSearch("test2").setFrom(from).setSize(size).addSort(sortField, order), singleShardResponse -> { assertThat( - multiShardResponse.getHits().getTotalHits().value, - equalTo(singleShardResponse.getHits().getTotalHits().value) + multiShardResponse.getHits().getTotalHits().value(), + equalTo(singleShardResponse.getHits().getTotalHits().value()) ); assertThat(multiShardResponse.getHits().getHits().length, equalTo(singleShardResponse.getHits().getHits().length)); for (int i = 0; i < 
multiShardResponse.getHits().getHits().length; i++) { @@ -1747,14 +1747,14 @@ public class FieldSortIT extends ESIntegTestCase { ); assertNoFailuresAndResponse(prepareSearch("test").addSort(SortBuilders.fieldSort("ip")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertArrayEquals(new String[] { "192.168.1.7" }, response.getHits().getAt(0).getSortValues()); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(1).getSortValues()); }); assertNoFailuresAndResponse( prepareSearch("test").addSort(SortBuilders.fieldSort("ip")).searchAfter(new Object[] { "192.168.1.7" }), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); assertArrayEquals(new String[] { "2001:db8::ff00:42:8329" }, response.getHits().getAt(0).getSortValues()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java index ae0d2cbeb841..fc5d40ae18c1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/sort/SimpleSortIT.java @@ -362,7 +362,7 @@ public class SimpleSortIT extends ESIntegTestCase { assertNoFailuresAndResponse( prepareSearch().setQuery(matchAllQuery()).addScriptField("id", scripField).addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -373,7 +373,7 @@ public class SimpleSortIT extends ESIntegTestCase { .addScriptField("id", new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['id'][0]", Collections.emptyMap())) .addSort("svalue", SortOrder.ASC), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -391,7 +391,7 @@ public class SimpleSortIT extends ESIntegTestCase { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L)); assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("3")); assertThat(searchResponse.getHits().getAt(1).field("id").getValue(), equalTo("1")); assertThat(searchResponse.getHits().getAt(2).field("id").getValue(), equalTo("2")); @@ -409,7 +409,7 @@ public class SimpleSortIT extends ESIntegTestCase { } assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); 
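// Annotation (not part of the patch): these tests can assert exact totals because the counts are
// small or, as in SimpleSearchIT above, trackTotalHits(true) is set. In general TotalHits.value()
// is only an exact count when relation() is EQUAL_TO; when hit counting stops early the relation
// is GREATER_THAN_OR_EQUAL_TO and value() is just a lower bound. A hedged sketch, again using a
// hypothetical `response` variable:
TotalHits total = response.getHits().getTotalHits();
if (total.relation() == TotalHits.Relation.EQUAL_TO) {
    assertThat(total.value(), equalTo(100L));                // exact count, as in the scroll tests above
} else {
    assertThat(total.value(), greaterThanOrEqualTo(100L));   // lower bound only
}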
assertThat(searchResponse.getHits().getAt(0).field("id").getValue(), equalTo("2")); } ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java index 6351d8d90638..ec9c680e17fc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/source/MetadataFetchingIT.java @@ -64,12 +64,12 @@ public class MetadataFetchingIT extends ESIntegTestCase { ) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getAt(0).getId(), nullValue()); assertThat(response.getHits().getAt(0).getSourceAsString(), nullValue()); assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1)); SearchHits hits = response.getHits().getAt(0).getInnerHits().get("nested"); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); assertThat(hits.getAt(0).getId(), nullValue()); assertThat(hits.getAt(0).getSourceAsString(), nullValue()); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java index 2952150c2cb2..f90056c6ae85 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/similarity/SimilarityIT.java @@ -54,10 +54,10 @@ public class SimilarityIT extends ESIntegTestCase { .get(); assertResponse(prepareSearch().setQuery(matchQuery("field1", "quick brown fox")), bm25SearchResponse -> { - assertThat(bm25SearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(bm25SearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float bm25Score = bm25SearchResponse.getHits().getHits()[0].getScore(); assertResponse(prepareSearch().setQuery(matchQuery("field2", "quick brown fox")), booleanSearchResponse -> { - assertThat(booleanSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(booleanSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); float defaultScore = booleanSearchResponse.getHits().getHits()[0].getScore(); assertThat(bm25Score, not(equalTo(defaultScore))); }); diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 414a6c6ba66a..89fc5f676cb1 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -458,7 +458,8 @@ module org.elasticsearch.server { provides org.apache.lucene.codecs.Codec with org.elasticsearch.index.codec.Elasticsearch814Codec, - org.elasticsearch.index.codec.Elasticsearch816Codec; + org.elasticsearch.index.codec.Elasticsearch816Codec, + org.elasticsearch.index.codec.Elasticsearch900Codec; provides org.apache.logging.log4j.core.util.ContextDataProvider with org.elasticsearch.common.logging.DynamicContextDataProvider; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java index 666708ea6ffd..e66862444035 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzer.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.PointValues; @@ -273,7 +274,7 @@ final class IndexDiskUsageAnalyzer { } case SORTED_SET -> { SortedSetDocValues sortedSet = iterateDocValues(maxDocs, () -> docValuesReader.getSortedSet(field), dv -> { - while (dv.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + for (int i = 0; i < dv.docValueCount(); i++) { cancellationChecker.logEvent(); } }); @@ -544,13 +545,14 @@ final class IndexDiskUsageAnalyzer { if (field.getVectorDimension() > 0) { switch (field.getVectorEncoding()) { case BYTE -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getByteVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file ByteVectorValues vectorValues = vectorReader.getByteVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -558,22 +560,23 @@ final class IndexDiskUsageAnalyzer { int numDocsToVisit = reader.maxDoc() < 10 ? reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } case FLOAT32 -> { - iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name), vectors -> { + iterateDocValues(reader.maxDoc(), () -> vectorReader.getFloatVectorValues(field.name).iterator(), vectors -> { cancellationChecker.logEvent(); - vectors.vectorValue(); + vectors.index(); }); // do a couple of randomized searches to figure out min and max offsets of index file FloatVectorValues vectorValues = vectorReader.getFloatVectorValues(field.name); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); final KnnCollector collector = new TopKnnCollector( Math.max(1, Math.min(100, vectorValues.size() - 1)), Integer.MAX_VALUE @@ -581,11 +584,11 @@ final class IndexDiskUsageAnalyzer { int numDocsToVisit = reader.maxDoc() < 10 ? 
reader.maxDoc() : 10 * (int) Math.log10(reader.maxDoc()); int skipFactor = Math.max(reader.maxDoc() / numDocsToVisit, 1); for (int i = 0; i < reader.maxDoc(); i += skipFactor) { - if ((i = vectorValues.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { + if ((i = iterator.advance(i)) == DocIdSetIterator.NO_MORE_DOCS) { break; } cancellationChecker.checkForCancellation(); - vectorReader.search(field.name, vectorValues.vectorValue(), collector, null); + vectorReader.search(field.name, vectorValues.vectorValue(iterator.index()), collector, null); } stats.addKnnVectors(field.name, directory.getBytesRead()); } diff --git a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java index 8ac2033e2ff1..dda589a458f8 100644 --- a/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java +++ b/server/src/main/java/org/elasticsearch/action/search/BottomSortValuesCollector.java @@ -54,7 +54,7 @@ class BottomSortValuesCollector { } synchronized void consumeTopDocs(TopFieldDocs topDocs, DocValueFormat[] sortValuesFormat) { - totalHits += topDocs.totalHits.value; + totalHits += topDocs.totalHits.value(); if (validateShardSortFields(topDocs.fields) == false) { return; } diff --git a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java index d41a2561646b..b52d76aac413 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java +++ b/server/src/main/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumer.java @@ -57,8 +57,8 @@ class CountOnlyQueryPhaseResultConsumer extends SearchPhaseResults GEO_DISTANCE_SORT_TYPE_CLASS = LatLonDocValuesField.newDistanceSort("some_geo_field", 0, 0).getClass(); public static void writeTotalHits(StreamOutput out, TotalHits totalHits) throws IOException { - out.writeVLong(totalHits.value); - out.writeEnum(totalHits.relation); + out.writeVLong(totalHits.value()); + out.writeEnum(totalHits.relation()); } public static void writeTopDocs(StreamOutput out, TopDocsAndMaxScore topDocs) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java index 625438ebdff9..cbceef120b87 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/index/FilterableTermsEnum.java @@ -27,6 +27,7 @@ import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.core.Nullable; import java.io.IOException; @@ -177,6 +178,11 @@ public class FilterableTermsEnum extends TermsEnum { } } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) { + return () -> this.seekExact(bytesRef); + } + @Override public int docFreq() throws IOException { return currentDocFreq; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java index 5bc52253939a..9460aba0a99c 100644 --- 
a/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/AutomatonQueries.java @@ -14,7 +14,6 @@ import org.apache.lucene.search.AutomatonQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -38,8 +37,6 @@ public class AutomatonQueries { Automaton a = Operations.concatenate(list); // since all elements in the list should be deterministic already, the concatenation also is, so no need to determinized assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); - assert a.isDeterministic(); return a; } @@ -100,7 +97,7 @@ public class AutomatonQueries { i += length; } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } protected static Automaton toCaseInsensitiveString(BytesRef br) { @@ -117,7 +114,6 @@ public class AutomatonQueries { Automaton a = Operations.concatenate(list); // concatenating deterministic automata should result in a deterministic automaton. No need to determinize here. assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); return a; } @@ -132,7 +128,6 @@ public class AutomatonQueries { if (altCase != codepoint) { result = Operations.union(case1, Automata.makeChar(altCase)); // this automaton should always be deterministic, no need to determinize - result = MinimizationOperations.minimize(result, 0); assert result.isDeterministic(); } else { result = case1; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java index b6f102a98203..65688b69f5aa 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitivePrefixQuery.java @@ -20,12 +20,12 @@ public class CaseInsensitivePrefixQuery extends AutomatonQuery { super(term, caseInsensitivePrefix(term.text())); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary) { + super(term, caseInsensitivePrefix(term.text()), isBinary); } - public CaseInsensitivePrefixQuery(Term term, int determinizeWorkLimit, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { - super(term, caseInsensitivePrefix(term.text()), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitivePrefixQuery(Term term, boolean isBinary, MultiTermQuery.RewriteMethod rewriteMethod) { + super(term, caseInsensitivePrefix(term.text()), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java index 91700e5ffe6c..6368acf38312 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/CaseInsensitiveWildcardQuery.java @@ -26,8 +26,8 @@ public class 
CaseInsensitiveWildcardQuery extends AutomatonQuery { super(term, toCaseInsensitiveWildcardAutomaton(term)); } - public CaseInsensitiveWildcardQuery(Term term, int determinizeWorkLimit, boolean isBinary, RewriteMethod rewriteMethod) { - super(term, toCaseInsensitiveWildcardAutomaton(term), determinizeWorkLimit, isBinary, rewriteMethod); + public CaseInsensitiveWildcardQuery(Term term, boolean isBinary, RewriteMethod rewriteMethod) { + super(term, toCaseInsensitiveWildcardAutomaton(term), isBinary, rewriteMethod); } @Override diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 25fa926ada2c..e2ac58caccd5 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -123,7 +123,7 @@ public class Queries { } int optionalClauses = 0; for (BooleanClause c : query.clauses()) { - if (c.getOccur() == BooleanClause.Occur.SHOULD) { + if (c.occur() == BooleanClause.Occur.SHOULD) { optionalClauses++; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java index 13fae303909f..299739fc3ba8 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/SpanBooleanQueryRewriteWithMaxClause.java @@ -19,7 +19,7 @@ import org.apache.lucene.queries.spans.SpanMultiTermQueryWrapper; import org.apache.lucene.queries.spans.SpanOrQuery; import org.apache.lucene.queries.spans.SpanQuery; import org.apache.lucene.queries.spans.SpanTermQuery; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.AttributeSource; @@ -42,7 +42,7 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap private final boolean hardLimit; public SpanBooleanQueryRewriteWithMaxClause() { - this(BooleanQuery.getMaxClauseCount(), true); + this(IndexSearcher.getMaxClauseCount(), true); } public SpanBooleanQueryRewriteWithMaxClause(int maxExpansions, boolean hardLimit) { @@ -59,10 +59,11 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap } @Override - public SpanQuery rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public SpanQuery rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { final MultiTermQuery.RewriteMethod delegate = new MultiTermQuery.RewriteMethod() { @Override - public Query rewrite(IndexReader reader, MultiTermQuery query) throws IOException { + public Query rewrite(IndexSearcher indexSearcher, MultiTermQuery query) throws IOException { + IndexReader reader = indexSearcher.getIndexReader(); Collection queries = collectTerms(reader, query); if (queries.size() == 0) { return new SpanMatchNoDocsQuery(query.getField(), "no expansion found for " + query.toString()); @@ -99,7 +100,7 @@ public class SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap + query.toString() + " ] " + "exceeds maxClauseCount [ Boolean maxClauseCount is set to " - + BooleanQuery.getMaxClauseCount() + + IndexSearcher.getMaxClauseCount() + "]" ); } else { @@ -112,6 +113,6 @@ public class 
SpanBooleanQueryRewriteWithMaxClause extends SpanMultiTermQueryWrap return queries; } }; - return (SpanQuery) delegate.rewrite(reader, query); + return (SpanQuery) delegate.rewrite(indexSearcher, query); } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java index f8d0c81466dc..54cd4c9946f6 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/XMoreLikeThis.java @@ -34,6 +34,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.similarities.TFIDFSimilarity; @@ -207,7 +208,7 @@ public final class XMoreLikeThis { /** * Return a Query with no more than this many terms. * - * @see BooleanQuery#getMaxClauseCount + * @see IndexSearcher#getMaxClauseCount * @see #setMaxQueryTerms */ public static final int DEFAULT_MAX_QUERY_TERMS = 25; @@ -468,7 +469,7 @@ public final class XMoreLikeThis { try { query.add(tq, BooleanClause.Occur.SHOULD); - } catch (BooleanQuery.TooManyClauses ignore) { + } catch (IndexSearcher.TooManyClauses ignore) { break; } } diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index ff82160be032..5a0c216c4e71 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -272,44 +272,65 @@ public class FunctionScoreQuery extends Query { this.needsScores = needsScores; } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + private ScorerSupplier functionScorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - final long leadCost = subQueryScorer.iterator().cost(); - final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; - final Bits[] docSets = new Bits[functions.length]; - for (int i = 0; i < functions.length; i++) { - ScoreFunction function = functions[i]; - leafFunctions[i] = function.getLeafScoreFunction(context); - if (filterWeights[i] != null) { - ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); - docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); - } else { - docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + final LeafScoreFunction[] leafFunctions = new LeafScoreFunction[functions.length]; + final Bits[] docSets = new Bits[functions.length]; + for (int i = 0; i < functions.length; i++) { + ScoreFunction function = functions[i]; + leafFunctions[i] = function.getLeafScoreFunction(context); + if (filterWeights[i] 
!= null) { + ScorerSupplier filterScorerSupplier = filterWeights[i].scorerSupplier(context); + docSets[i] = Lucene.asSequentialAccessBits(context.reader().maxDoc(), filterScorerSupplier, leadCost); + } else { + docSets[i] = new Bits.MatchAllBits(context.reader().maxDoc()); + } + } + return new FunctionFactorScorer( + subQueryScorer, + scoreMode, + functions, + maxBoost, + leafFunctions, + docSets, + combineFunction, + needsScores + ); } - } - return new FunctionFactorScorer( - this, - subQueryScorer, - scoreMode, - functions, - maxBoost, - leafFunctions, - docSets, - combineFunction, - needsScores - ); + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer scorer = functionScorer(context); - if (scorer != null && minScore != null) { - scorer = new MinScoreScorer(this, scorer, minScore); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier scorerSupplier = functionScorerSupplier(context); + + if (scorerSupplier == null || minScore == null) { + return scorerSupplier; } - return scorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(scorerSupplier.get(leadCost), minScore); + } + + @Override + public long cost() { + return scorerSupplier.cost(); + } + }; } @Override @@ -356,7 +377,8 @@ public class FunctionScoreQuery extends Query { } else if (singleFunction && functionsExplanations.size() == 1) { factorExplanation = functionsExplanations.get(0); } else { - FunctionFactorScorer scorer = functionScorer(context); + + FunctionFactorScorer scorer = (FunctionFactorScorer) functionScorerSupplier(context).get(1L); int actualDoc = scorer.iterator().advance(doc); assert (actualDoc == doc); double score = scorer.computeScore(doc, expl.getValue().floatValue()); @@ -391,7 +413,6 @@ public class FunctionScoreQuery extends Query { private final boolean needsScores; private FunctionFactorScorer( - CustomBoostFactorWeight w, Scorer scorer, ScoreMode scoreMode, ScoreFunction[] functions, @@ -401,7 +422,7 @@ public class FunctionScoreQuery extends Query { CombineFunction scoreCombiner, boolean needsScores ) throws IOException { - super(scorer, w); + super(scorer); this.scoreMode = scoreMode; this.functions = functions; this.leafFunctions = leafFunctions; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java index 3d23f66b09d8..0fd46447b3ea 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/MinScoreScorer.java @@ -12,7 +12,6 @@ package org.elasticsearch.common.lucene.search.function; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import java.io.IOException; @@ -25,12 +24,11 @@ public final class MinScoreScorer extends Scorer { private float curScore; private final float boost; - public MinScoreScorer(Weight weight, Scorer scorer, float minScore) { - this(weight, scorer, minScore, 1f); + public MinScoreScorer(Scorer scorer, float minScore) { + this(scorer, minScore, 1f); } - public MinScoreScorer(Weight weight, Scorer scorer, float minScore, float boost) { - 
super(weight); + public MinScoreScorer(Scorer scorer, float minScore, float boost) { this.in = scorer; this.minScore = minScore; this.boost = boost; diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java index 4222b5dff98a..d38243f5348c 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreFunction.java @@ -27,14 +27,8 @@ import java.util.function.IntSupplier; public class ScriptScoreFunction extends ScoreFunction { static final class CannedScorer extends Scorable { - protected int docid; protected float score; - @Override - public int docID() { - return docid; - } - @Override public float score() { return score; @@ -70,14 +64,13 @@ public class ScriptScoreFunction extends ScoreFunction { if (script.needs_termStats()) { assert termStatsFactory != null; - leafScript._setTermStats(termStatsFactory.apply(ctx, scorer::docID)); + leafScript._setTermStats(termStatsFactory.apply(ctx, leafScript::docId)); } return new LeafScoreFunction() { private double score(int docId, float subQueryScore, ScoreScript.ExplanationHolder holder) throws IOException { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore; double result = leafScript.execute(holder); @@ -97,7 +90,6 @@ public class ScriptScoreFunction extends ScoreFunction { Explanation exp; if (leafScript instanceof ExplainableScoreScript) { leafScript.setDocument(docId); - scorer.docid = docId; scorer.score = subQueryScore.getValue().floatValue(); exp = ((ExplainableScoreScript) leafScript).explain(subQueryScore); } else { diff --git a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java index 5e3f8e8e6271..e58b2fffed00 100644 --- a/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java +++ b/server/src/main/java/org/elasticsearch/common/lucene/search/function/ScriptScoreQuery.java @@ -23,6 +23,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; @@ -38,6 +39,7 @@ import java.io.IOException; import java.util.HashSet; import java.util.Objects; import java.util.Set; +import java.util.function.IntSupplier; /** * A query that uses a script to compute documents' scores. 
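The FunctionScoreQuery rework above and the ScriptScoreQuery rework below follow the same shape: with Lucene 10 the wrapping moves from Weight#scorer into Weight#scorerSupplier, and the caller's lead cost is forwarded to the delegate before the resulting Scorer is wrapped. A minimal sketch of that delegation pattern, using a hypothetical helper rather than code from this change:

import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;

import java.io.IOException;
import java.util.function.Function;

final class ScorerSupplierWrapping {
    // Wraps the Scorer produced by a delegate supplier while preserving its cost estimate.
    static ScorerSupplier wrap(ScorerSupplier delegate, Function<Scorer, Scorer> wrapper) {
        return new ScorerSupplier() {
            @Override
            public Scorer get(long leadCost) throws IOException {
                // forward leadCost so the delegate can still choose the cheapest iteration strategy
                return wrapper.apply(delegate.get(leadCost));
            }

            @Override
            public long cost() {
                return delegate.cost();
            }
        };
    }
}

MinScoreScorer above slots into this shape directly now that Scorer subclasses no longer receive a Weight.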
@@ -104,30 +106,40 @@ public class ScriptScoreQuery extends Query { } return new Weight(this) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - if (minScore == null) { - final BulkScorer subQueryBulkScorer = subQueryWeight.bulkScorer(context); - if (subQueryBulkScorer == null) { - return null; - } - return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); - } else { - return super.bulkScorer(context); - } - } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer subQueryScorer = subQueryWeight.scorer(context); - if (subQueryScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier subQueryScorerSupplier = subQueryWeight.scorerSupplier(context); + if (subQueryScorerSupplier == null) { return null; } - Scorer scriptScorer = new ScriptScorer(this, makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); - if (minScore != null) { - scriptScorer = new MinScoreScorer(this, scriptScorer, minScore); - } - return scriptScorer; + + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer subQueryScorer = subQueryScorerSupplier.get(leadCost); + Scorer scriptScorer = new ScriptScorer(makeScoreScript(context), subQueryScorer, subQueryScoreMode, boost, null); + if (minScore != null) { + scriptScorer = new MinScoreScorer(scriptScorer, minScore); + } + return scriptScorer; + } + + @Override + public BulkScorer bulkScorer() throws IOException { + if (minScore == null) { + final BulkScorer subQueryBulkScorer = subQueryScorerSupplier.bulkScorer(); + return new ScriptScoreBulkScorer(subQueryBulkScorer, subQueryScoreMode, makeScoreScript(context), boost); + } else { + return super.bulkScorer(); + } + } + + @Override + public long cost() { + return subQueryScorerSupplier.cost(); + } + }; } @Override @@ -138,7 +150,6 @@ public class ScriptScoreQuery extends Query { } ExplanationHolder explanationHolder = new ExplanationHolder(); Scorer scorer = new ScriptScorer( - this, makeScoreScript(context), subQueryWeight.scorer(context), subQueryScoreMode, @@ -231,14 +242,12 @@ public class ScriptScoreQuery extends Query { private final ExplanationHolder explanation; ScriptScorer( - Weight weight, ScoreScript scoreScript, Scorer subQueryScorer, ScoreMode subQueryScoreMode, float boost, ExplanationHolder explanation ) { - super(weight); this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); @@ -292,19 +301,27 @@ public class ScriptScoreQuery extends Query { private final ScoreScript scoreScript; private final Scorable subQueryScorer; private final float boost; + private final IntSupplier docIDSupplier; - ScriptScorable(ScoreScript scoreScript, Scorable subQueryScorer, ScoreMode subQueryScoreMode, float boost) { + ScriptScorable( + ScoreScript scoreScript, + Scorable subQueryScorer, + ScoreMode subQueryScoreMode, + float boost, + IntSupplier docIDSupplier + ) { this.scoreScript = scoreScript; if (subQueryScoreMode == ScoreMode.COMPLETE) { scoreScript.setScorer(subQueryScorer); } this.subQueryScorer = subQueryScorer; this.boost = boost; + this.docIDSupplier = docIDSupplier; } @Override public float score() throws IOException { - int docId = docID(); + int docId = docIDSupplier.getAsInt(); scoreScript.setDocument(docId); float score = (float) scoreScript.execute(null); if (score < 0f || Float.isNaN(score)) { 
@@ -320,10 +337,6 @@ public class ScriptScoreQuery extends Query { return score * boost; } - @Override - public int docID() { - return subQueryScorer.docID(); - } } /** @@ -350,9 +363,18 @@ public class ScriptScoreQuery extends Query { private LeafCollector wrapCollector(LeafCollector collector) { return new FilterLeafCollector(collector) { + + private int docID; + @Override public void setScorer(Scorable scorer) throws IOException { - in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost)); + in.setScorer(new ScriptScorable(scoreScript, scorer, subQueryScoreMode, boost, () -> docID)); + } + + @Override + public void collect(int doc) throws IOException { + this.docID = doc; + super.collect(doc); } }; } diff --git a/server/src/main/java/org/elasticsearch/common/regex/Regex.java b/server/src/main/java/org/elasticsearch/common/regex/Regex.java index d5b2e8497fc0..aaaab78b7173 100644 --- a/server/src/main/java/org/elasticsearch/common/regex/Regex.java +++ b/server/src/main/java/org/elasticsearch/common/regex/Regex.java @@ -69,7 +69,7 @@ public class Regex { previous = i + 1; } automata.add(Automata.makeString(pattern.substring(previous))); - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** @@ -113,7 +113,7 @@ public class Regex { prefixAutomaton.add(Automata.makeAnyString()); automata.add(Operations.concatenate(prefixAutomaton)); } - return Operations.union(automata); + return Operations.determinize(Operations.union(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } /** diff --git a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 232ce34b153a..defaddb25eb4 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/server/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -254,7 +254,7 @@ public class KeyStoreWrapper implements SecureSettings { } Directory directory = new NIOFSDirectory(configDir); - try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(KEYSTORE_FILENAME)) { final int formatVersion; try { formatVersion = CodecUtil.checkHeader(input, KEYSTORE_FILENAME, MIN_FORMAT_VERSION, CURRENT_VERSION); diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java index 2ef96123e63d..c4b03c712c27 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/support/XContentMapValues.java @@ -280,8 +280,8 @@ public class XContentMapValues { include = matchAllAutomaton; } else { Automaton includeA = Regex.simpleMatchToAutomaton(includes); - includeA = makeMatchDotsInFieldNames(includeA); - include = new CharacterRunAutomaton(includeA, MAX_DETERMINIZED_STATES); + includeA = Operations.determinize(makeMatchDotsInFieldNames(includeA), MAX_DETERMINIZED_STATES); + include = new CharacterRunAutomaton(includeA); } Automaton excludeA; @@ -289,9 +289,9 @@ public class XContentMapValues { excludeA = Automata.makeEmpty(); } else { excludeA = Regex.simpleMatchToAutomaton(excludes); - excludeA = makeMatchDotsInFieldNames(excludeA); + excludeA = 
Operations.determinize(makeMatchDotsInFieldNames(excludeA), MAX_DETERMINIZED_STATES); } - CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA, MAX_DETERMINIZED_STATES); + CharacterRunAutomaton exclude = new CharacterRunAutomaton(excludeA); // NOTE: We cannot use Operations.minus because of the special case that // we want all sub properties to match as soon as an object matches diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index 749946e05b74..0c6cf2c8a076 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.index.TieredMergePolicy; import org.apache.lucene.search.DocIdSetIterator; @@ -449,7 +450,7 @@ public class PersistedClusterStateService { // resources during test execution checkIndex.setThreadCount(1); checkIndex.setInfoStream(printStream); - checkIndex.setChecksumsOnly(true); + checkIndex.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); isClean = checkIndex.checkIndex().clean; } @@ -705,10 +706,11 @@ public class PersistedClusterStateService { final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + final StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { logger.trace("processing doc {}", docIdSetIterator.docID()); - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final BytesArray documentData = new BytesArray(document.getBinaryValue(DATA_FIELD_NAME)); if (document.getField(PAGE_FIELD_NAME) == null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexModule.java b/server/src/main/java/org/elasticsearch/index/IndexModule.java index 7eed5f2b7759..4ff7ef60cc0a 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/server/src/main/java/org/elasticsearch/index/IndexModule.java @@ -18,7 +18,6 @@ import org.apache.lucene.index.LeafReader; import org.apache.lucene.search.similarities.BM25Similarity; import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.util.Constants; import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; @@ -451,7 +450,7 @@ public final class IndexModule { } public static Type defaultStoreType(final boolean allowMmap) { - if (allowMmap && Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (allowMmap && Constants.JRE_IS_64BIT) { return Type.HYBRIDFS; } else { return Type.NIOFS; diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 7e04a64e74cb..efb1facc79b3 100644 --- 
a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Assertions; import org.elasticsearch.core.UpdateForV9; import java.lang.reflect.Field; +import java.text.ParseException; import java.util.Collection; import java.util.Collections; import java.util.HashMap; @@ -48,29 +49,38 @@ public class IndexVersions { return new IndexVersion(id, luceneVersion); } + // TODO: this is just a hack to allow to keep the V7 IndexVersion constants, during compilation. Remove + private static Version parseUnchecked(String version) { + try { + return Version.parse(version); + } catch (ParseException e) { + throw new RuntimeException(e); + } + } + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // remove the index versions with which v9 will not need to interact public static final IndexVersion ZERO = def(0, Version.LATEST); - public static final IndexVersion V_7_0_0 = def(7_00_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_1_0 = def(7_01_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_0 = def(7_02_00_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_2_1 = def(7_02_01_99, Version.LUCENE_8_0_0); - public static final IndexVersion V_7_3_0 = def(7_03_00_99, Version.LUCENE_8_1_0); - public static final IndexVersion V_7_4_0 = def(7_04_00_99, Version.LUCENE_8_2_0); - public static final IndexVersion V_7_5_0 = def(7_05_00_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_5_2 = def(7_05_02_99, Version.LUCENE_8_3_0); - public static final IndexVersion V_7_6_0 = def(7_06_00_99, Version.LUCENE_8_4_0); - public static final IndexVersion V_7_7_0 = def(7_07_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_8_0 = def(7_08_00_99, Version.LUCENE_8_5_1); - public static final IndexVersion V_7_9_0 = def(7_09_00_99, Version.LUCENE_8_6_0); - public static final IndexVersion V_7_10_0 = def(7_10_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_11_0 = def(7_11_00_99, Version.LUCENE_8_7_0); - public static final IndexVersion V_7_12_0 = def(7_12_00_99, Version.LUCENE_8_8_0); - public static final IndexVersion V_7_13_0 = def(7_13_00_99, Version.LUCENE_8_8_2); - public static final IndexVersion V_7_14_0 = def(7_14_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_15_0 = def(7_15_00_99, Version.LUCENE_8_9_0); - public static final IndexVersion V_7_16_0 = def(7_16_00_99, Version.LUCENE_8_10_1); - public static final IndexVersion V_7_17_0 = def(7_17_00_99, Version.LUCENE_8_11_1); + public static final IndexVersion V_7_0_0 = def(7_00_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_1_0 = def(7_01_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_0 = def(7_02_00_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_2_1 = def(7_02_01_99, parseUnchecked("8.0.0")); + public static final IndexVersion V_7_3_0 = def(7_03_00_99, parseUnchecked("8.1.0")); + public static final IndexVersion V_7_4_0 = def(7_04_00_99, parseUnchecked("8.2.0")); + public static final IndexVersion V_7_5_0 = def(7_05_00_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_5_2 = def(7_05_02_99, parseUnchecked("8.3.0")); + public static final IndexVersion V_7_6_0 = def(7_06_00_99, parseUnchecked("8.4.0")); + public static final IndexVersion V_7_7_0 = def(7_07_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_8_0 = 
def(7_08_00_99, parseUnchecked("8.5.1")); + public static final IndexVersion V_7_9_0 = def(7_09_00_99, parseUnchecked("8.6.0")); + public static final IndexVersion V_7_10_0 = def(7_10_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_11_0 = def(7_11_00_99, parseUnchecked("8.7.0")); + public static final IndexVersion V_7_12_0 = def(7_12_00_99, parseUnchecked("8.8.0")); + public static final IndexVersion V_7_13_0 = def(7_13_00_99, parseUnchecked("8.8.2")); + public static final IndexVersion V_7_14_0 = def(7_14_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_15_0 = def(7_15_00_99, parseUnchecked("8.9.0")); + public static final IndexVersion V_7_16_0 = def(7_16_00_99, parseUnchecked("8.10.1")); + public static final IndexVersion V_7_17_0 = def(7_17_00_99, parseUnchecked("8.11.1")); public static final IndexVersion V_8_0_0 = def(8_00_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_1_0 = def(8_01_00_99, Version.LUCENE_9_0_0); public static final IndexVersion V_8_2_0 = def(8_02_00_99, Version.LUCENE_9_1_0); @@ -118,6 +128,9 @@ public class IndexVersions { public static final IndexVersion MERGE_ON_RECOVERY_VERSION = def(8_515_00_0, Version.LUCENE_9_11_1); public static final IndexVersion UPGRADE_TO_LUCENE_9_12 = def(8_516_00_0, Version.LUCENE_9_12_0); public static final IndexVersion ENABLE_IGNORE_ABOVE_LOGSDB = def(8_517_00_0, Version.LUCENE_9_12_0); + + public static final IndexVersion UPGRADE_TO_LUCENE_10_0_0 = def(9_000_00_0, Version.LUCENE_10_0_0); + /* * STOP! READ THIS FIRST! No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java index 144b99abe564..c1c392ac07f1 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/server/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -12,7 +12,7 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FilterCodec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.core.Nullable; @@ -46,7 +46,7 @@ public class CodecService implements CodecProvider { public CodecService(@Nullable MapperService mapperService, BigArrays bigArrays) { final var codecs = new HashMap(); - Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, mapperService, bigArrays); + Codec legacyBestSpeedCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, mapperService, bigArrays); if (ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()) { codecs.put(DEFAULT_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, mapperService, bigArrays)); } else { @@ -58,7 +58,7 @@ public class CodecService implements CodecProvider { BEST_COMPRESSION_CODEC, new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, mapperService, bigArrays) ); - Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); + Codec legacyBestCompressionCodec = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, mapperService, bigArrays); 
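For reference, a sketch of the extension point that LegacyPerFieldMapperCodec relies on above: Lucene100Codec, like Lucene912Codec before it, is meant to be subclassed with per-field format hooks. The class name and the "_id" routing below are illustrative only, not part of this change:

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene100.Lucene100Codec;

class ExamplePerFieldCodec extends Lucene100Codec {
    private final PostingsFormat idPostingsFormat; // e.g. a bloom-filter backed postings format

    ExamplePerFieldCodec(Lucene100Codec.Mode mode, PostingsFormat idPostingsFormat) {
        super(mode); // BEST_SPEED or BEST_COMPRESSION, as in the constructor calls above
        this.idPostingsFormat = idPostingsFormat;
    }

    @Override
    public PostingsFormat getPostingsFormatForField(String field) {
        // route one field to a dedicated format, keep the Lucene default for everything else
        return "_id".equals(field) ? idPostingsFormat : super.getPostingsFormatForField(field);
    }
}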
codecs.put(LEGACY_BEST_COMPRESSION_CODEC, legacyBestCompressionCodec); codecs.put(LUCENE_DEFAULT_CODEC, Codec.getDefault()); diff --git a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java index 2ba169583b71..00614140e237 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/DeduplicatingFieldInfosFormat.java @@ -49,11 +49,12 @@ public final class DeduplicatingFieldInfosFormat extends FieldInfosFormat { deduplicated[i++] = new FieldInfo( FieldMapper.internFieldName(fi.getName()), fi.number, - fi.hasVectors(), + fi.hasTermVectors(), fi.omitsNorms(), fi.hasPayloads(), fi.getIndexOptions(), fi.getDocValuesType(), + fi.docValuesSkipIndexType(), fi.getDocValuesGen(), internStringStringMap(fi.attributes()), fi.getPointDimensionCount(), diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java index 27ff19a9d8e4..9f46050f68f9 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch816Codec.java @@ -9,12 +9,12 @@ package org.elasticsearch.index.codec; +import org.apache.lucene.backward_codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.StoredFieldsFormat; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; diff --git a/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java new file mode 100644 index 000000000000..4154a242c15e --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/Elasticsearch900Codec.java @@ -0,0 +1,131 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.codec; + +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.KnnVectorsFormat; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.StoredFieldsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; +import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; +import org.apache.lucene.codecs.lucene912.Lucene912PostingsFormat; +import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; +import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; +import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; +import org.elasticsearch.index.codec.zstd.Zstd814StoredFieldsFormat; + +/** + * Elasticsearch codec as of 9.0. This extends the Lucene 10.0 codec to compressed stored fields with ZSTD instead of LZ4/DEFLATE. See + * {@link Zstd814StoredFieldsFormat}. + */ +public class Elasticsearch900Codec extends CodecService.DeduplicateFieldInfosCodec { + + private final StoredFieldsFormat storedFieldsFormat; + + private final PostingsFormat defaultPostingsFormat; + private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() { + @Override + public PostingsFormat getPostingsFormatForField(String field) { + return Elasticsearch900Codec.this.getPostingsFormatForField(field); + } + }; + + private final DocValuesFormat defaultDVFormat; + private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { + @Override + public DocValuesFormat getDocValuesFormatForField(String field) { + return Elasticsearch900Codec.this.getDocValuesFormatForField(field); + } + }; + + private final KnnVectorsFormat defaultKnnVectorsFormat; + private final KnnVectorsFormat knnVectorsFormat = new PerFieldKnnVectorsFormat() { + @Override + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return Elasticsearch900Codec.this.getKnnVectorsFormatForField(field); + } + }; + + /** Public no-arg constructor, needed for SPI loading at read-time. */ + public Elasticsearch900Codec() { + this(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + } + + /** + * Constructor. Takes a {@link Zstd814StoredFieldsFormat.Mode} that describes whether to optimize for retrieval speed at the expense of + * worse space-efficiency or vice-versa. + */ + public Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode mode) { + super("Elasticsearch900", new Lucene100Codec()); + this.storedFieldsFormat = mode.getFormat(); + this.defaultPostingsFormat = new Lucene912PostingsFormat(); + this.defaultDVFormat = new Lucene90DocValuesFormat(); + this.defaultKnnVectorsFormat = new Lucene99HnswVectorsFormat(); + } + + @Override + public StoredFieldsFormat storedFieldsFormat() { + return storedFieldsFormat; + } + + @Override + public final PostingsFormat postingsFormat() { + return postingsFormat; + } + + @Override + public final DocValuesFormat docValuesFormat() { + return docValuesFormat; + } + + @Override + public final KnnVectorsFormat knnVectorsFormat() { + return knnVectorsFormat; + } + + /** + * Returns the postings format that should be used for writing new segments of field. + * + *
The default implementation always returns "Lucene912". + * + *
WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation, + */ + public PostingsFormat getPostingsFormatForField(String field) { + return defaultPostingsFormat; + } + + /** + * Returns the docvalues format that should be used for writing new segments of field + * . + * + *
The default implementation always returns "Lucene90". + + *
WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public DocValuesFormat getDocValuesFormatForField(String field) { + return defaultDVFormat; + } + + /** + * Returns the vectors format that should be used for writing new segments of field + * + *
The default implementation always returns "Lucene99HnswVectorsFormat". + + *
WARNING: if you subclass, you are responsible for index backwards compatibility: + * future version of Lucene are only guaranteed to be able to read the default implementation. + */ + public KnnVectorsFormat getKnnVectorsFormatForField(String field) { + return defaultKnnVectorsFormat; + } + +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java index 64c2ca788f63..bf2c5a9f01e2 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/LegacyPerFieldMapperCodec.java @@ -13,7 +13,7 @@ import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.mapper.MapperService; @@ -22,11 +22,11 @@ import org.elasticsearch.index.mapper.MapperService; * Legacy version of {@link PerFieldMapperCodec}. This codec is preserved to give an escape hatch in case we encounter issues with new * changes in {@link PerFieldMapperCodec}. */ -public final class LegacyPerFieldMapperCodec extends Lucene912Codec { +public final class LegacyPerFieldMapperCodec extends Lucene100Codec { private final PerFieldFormatSupplier formatSupplier; - public LegacyPerFieldMapperCodec(Lucene912Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { + public LegacyPerFieldMapperCodec(Lucene100Codec.Mode compressionMode, MapperService mapperService, BigArrays bigArrays) { super(compressionMode); this.formatSupplier = new PerFieldFormatSupplier(mapperService, bigArrays); // If the below assertion fails, it is a sign that Lucene released a new codec. You must create a copy of the current Elasticsearch diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index 83c5cb396d88..b60b88da5949 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -26,7 +26,7 @@ import org.elasticsearch.index.mapper.MapperService; * per index in real time via the mapping API. If no specific postings format or vector format is * configured for a specific field the default postings or vector format is used. 
*/ -public final class PerFieldMapperCodec extends Elasticsearch816Codec { +public final class PerFieldMapperCodec extends Elasticsearch900Codec { private final PerFieldFormatSupplier formatSupplier; diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java index d26fb52a82bc..81129835518d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES85BloomFilterPostingsFormat.java @@ -36,7 +36,6 @@ import org.apache.lucene.index.TermState; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -142,12 +141,7 @@ public class ES85BloomFilterPostingsFormat extends PostingsFormat { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { CodecUtil.checkIndexHeader( metaIn, BLOOM_CODEC_NAME, diff --git a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java index 01d874adec14..abf68abe5188 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/bloomfilter/ES87BloomFilterPostingsFormat.java @@ -38,7 +38,6 @@ import org.apache.lucene.index.TermState; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.store.ChecksumIndexInput; -import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RandomAccessInput; @@ -291,12 +290,7 @@ public class ES87BloomFilterPostingsFormat extends PostingsFormat { FieldsReader(SegmentReadState state) throws IOException { boolean success = false; - try ( - ChecksumIndexInput metaIn = state.directory.openChecksumInput( - metaFile(state.segmentInfo, state.segmentSuffix), - IOContext.READONCE - ) - ) { + try (ChecksumIndexInput metaIn = state.directory.openChecksumInput(metaFile(state.segmentInfo, state.segmentSuffix))) { Map bloomFilters = null; Throwable priorE = null; long indexFileLength = 0; diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java index 5d79807fe667..dc73428a07c7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesConsumer.java @@ -15,6 +15,7 @@ import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.lucene90.IndexedDISI; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; +import 
org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.EmptyDocValuesProducer; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexFileNames; @@ -41,9 +42,13 @@ import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.core.IOUtils; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; +import java.util.List; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_LEVEL_SHIFT; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SORTED_SET; final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { @@ -51,9 +56,16 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { IndexOutput data, meta; final int maxDoc; private byte[] termsDictBuffer; + private final int skipIndexIntervalSize; - ES87TSDBDocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) - throws IOException { + ES87TSDBDocValuesConsumer( + SegmentWriteState state, + int skipIndexIntervalSize, + String dataCodec, + String dataExtension, + String metaCodec, + String metaExtension + ) throws IOException { this.termsDictBuffer = new byte[1 << 14]; boolean success = false; try { @@ -76,6 +88,7 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { state.segmentSuffix ); maxDoc = state.segmentInfo.maxDoc(); + this.skipIndexIntervalSize = skipIndexIntervalSize; success = true; } finally { if (success == false) { @@ -88,12 +101,17 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.NUMERIC); - writeField(field, new EmptyDocValuesProducer() { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { return DocValues.singleton(valuesProducer.getNumeric(field)); } - }, -1); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + + writeField(field, producer, -1); } private long[] writeField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { @@ -263,13 +281,11 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { public void addSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { meta.writeInt(field.number); meta.writeByte(ES87TSDBDocValuesFormat.SORTED); - doAddSortedField(field, valuesProducer); + doAddSortedField(field, valuesProducer, false); } - private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { - SortedDocValues sorted = valuesProducer.getSorted(field); - int maxOrd = sorted.getValueCount(); - writeField(field, new EmptyDocValuesProducer() { + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer, boolean addTypeByte) throws IOException { + DocValuesProducer producer = new EmptyDocValuesProducer() { @Override public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { SortedDocValues sorted = valuesProducer.getSorted(field); @@ -306,7 +322,16 @@ final class ES87TSDBDocValuesConsumer 
extends DocValuesConsumer { }; return DocValues.singleton(sortedOrds); } - }, maxOrd); + }; + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, producer); + } + if (addTypeByte) { + meta.writeByte((byte) 0); // multiValued (0 = singleValued) + } + SortedDocValues sorted = valuesProducer.getSorted(field); + int maxOrd = sorted.getValueCount(); + writeField(field, producer, maxOrd); addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); } @@ -459,6 +484,12 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { } private void writeSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer, long maxOrd) throws IOException { + if (field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + writeSkipIndex(field, valuesProducer); + } + if (maxOrd > -1) { + meta.writeByte((byte) 1); // multiValued (1 = multiValued) + } long[] stats = writeField(field, valuesProducer, maxOrd); int numDocsWithField = Math.toIntExact(stats[0]); long numValues = stats[1]; @@ -510,16 +541,14 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { meta.writeByte(SORTED_SET); if (isSingleValued(valuesProducer.getSortedSet(field))) { - meta.writeByte((byte) 0); // multiValued (0 = singleValued) doAddSortedField(field, new EmptyDocValuesProducer() { @Override public SortedDocValues getSorted(FieldInfo field) throws IOException { return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); } - }); + }, true); return; } - meta.writeByte((byte) 1); // multiValued (1 = multiValued) SortedSetDocValues values = valuesProducer.getSortedSet(field); long maxOrd = values.getValueCount(); @@ -603,4 +632,157 @@ final class ES87TSDBDocValuesConsumer extends DocValuesConsumer { meta = data = null; } } + + private static class SkipAccumulator { + int minDocID; + int maxDocID; + int docCount; + long minValue; + long maxValue; + + SkipAccumulator(int docID) { + minDocID = docID; + minValue = Long.MAX_VALUE; + maxValue = Long.MIN_VALUE; + docCount = 0; + } + + boolean isDone(int skipIndexIntervalSize, int valueCount, long nextValue, int nextDoc) { + if (docCount < skipIndexIntervalSize) { + return false; + } + // Once we reach the interval size, we will keep accepting documents if + // - next doc value is not a multi-value + // - current accumulator only contains a single value and next value is the same value + // - the accumulator is dense and the next doc keeps the density (no gaps) + return valueCount > 1 || minValue != maxValue || minValue != nextValue || docCount != nextDoc - minDocID; + } + + void accumulate(long value) { + minValue = Math.min(minValue, value); + maxValue = Math.max(maxValue, value); + } + + void accumulate(SkipAccumulator other) { + assert minDocID <= other.minDocID && maxDocID < other.maxDocID; + maxDocID = other.maxDocID; + minValue = Math.min(minValue, other.minValue); + maxValue = Math.max(maxValue, other.maxValue); + docCount += other.docCount; + } + + void nextDoc(int docID) { + maxDocID = docID; + ++docCount; + } + + public static SkipAccumulator merge(List list, int index, int length) { + SkipAccumulator acc = new SkipAccumulator(list.get(index).minDocID); + for (int i = 0; i < length; i++) { + acc.accumulate(list.get(index + i)); + } + return acc; + } + } + + private void writeSkipIndex(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + assert field.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE; + final long start = data.getFilePointer(); + 
final SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + long globalMaxValue = Long.MIN_VALUE; + long globalMinValue = Long.MAX_VALUE; + int globalDocCount = 0; + int maxDocId = -1; + final List accumulators = new ArrayList<>(); + SkipAccumulator accumulator = null; + final int maxAccumulators = 1 << (SKIP_INDEX_LEVEL_SHIFT * (SKIP_INDEX_MAX_LEVEL - 1)); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + final long firstValue = values.nextValue(); + if (accumulator != null && accumulator.isDone(skipIndexIntervalSize, values.docValueCount(), firstValue, doc)) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + accumulator = null; + if (accumulators.size() == maxAccumulators) { + writeLevels(accumulators); + accumulators.clear(); + } + } + if (accumulator == null) { + accumulator = new SkipAccumulator(doc); + accumulators.add(accumulator); + } + accumulator.nextDoc(doc); + accumulator.accumulate(firstValue); + for (int i = 1, end = values.docValueCount(); i < end; ++i) { + accumulator.accumulate(values.nextValue()); + } + } + + if (accumulators.isEmpty() == false) { + globalMaxValue = Math.max(globalMaxValue, accumulator.maxValue); + globalMinValue = Math.min(globalMinValue, accumulator.minValue); + globalDocCount += accumulator.docCount; + maxDocId = accumulator.maxDocID; + writeLevels(accumulators); + } + meta.writeLong(start); // record the start in meta + meta.writeLong(data.getFilePointer() - start); // record the length + assert globalDocCount == 0 || globalMaxValue >= globalMinValue; + meta.writeLong(globalMaxValue); + meta.writeLong(globalMinValue); + assert globalDocCount <= maxDocId + 1; + meta.writeInt(globalDocCount); + meta.writeInt(maxDocId); + } + + private void writeLevels(List accumulators) throws IOException { + final List> accumulatorsLevels = new ArrayList<>(SKIP_INDEX_MAX_LEVEL); + accumulatorsLevels.add(accumulators); + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL - 1; i++) { + accumulatorsLevels.add(buildLevel(accumulatorsLevels.get(i))); + } + int totalAccumulators = accumulators.size(); + for (int index = 0; index < totalAccumulators; index++) { + // compute how many levels we need to write for the current accumulator + final int levels = getLevels(index, totalAccumulators); + // write the number of levels + data.writeByte((byte) levels); + // write intervals in reverse order. 
This is done so we don't + // need to read all of them in case of slipping + for (int level = levels - 1; level >= 0; level--) { + final SkipAccumulator accumulator = accumulatorsLevels.get(level).get(index >> (SKIP_INDEX_LEVEL_SHIFT * level)); + data.writeInt(accumulator.maxDocID); + data.writeInt(accumulator.minDocID); + data.writeLong(accumulator.maxValue); + data.writeLong(accumulator.minValue); + data.writeInt(accumulator.docCount); + } + } + } + + private static List buildLevel(List accumulators) { + final int levelSize = 1 << SKIP_INDEX_LEVEL_SHIFT; + final List collector = new ArrayList<>(); + for (int i = 0; i < accumulators.size() - levelSize + 1; i += levelSize) { + collector.add(SkipAccumulator.merge(accumulators, i, levelSize)); + } + return collector; + } + + private static int getLevels(int index, int size) { + if (Integer.numberOfTrailingZeros(index) >= SKIP_INDEX_LEVEL_SHIFT) { + // TODO: can we do it in constant time rather than linearly with SKIP_INDEX_MAX_LEVEL? + final int left = size - index; + for (int level = SKIP_INDEX_MAX_LEVEL - 1; level > 0; level--) { + final int numberIntervals = 1 << (SKIP_INDEX_LEVEL_SHIFT * level); + if (left >= numberIntervals && index % numberIntervals == 0) { + return level + 1; + } + } + } + return 1; + } + } diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java index 742249892f61..496c41b42869 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormat.java @@ -43,13 +43,57 @@ public class ES87TSDBDocValuesFormat extends org.apache.lucene.codecs.DocValuesF static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; + // number of documents in an interval + private static final int DEFAULT_SKIP_INDEX_INTERVAL_SIZE = 4096; + // bytes on an interval: + // * 1 byte : number of levels + // * 16 bytes: min / max value, + // * 8 bytes: min / max docID + // * 4 bytes: number of documents + private static final long SKIP_INDEX_INTERVAL_BYTES = 29L; + // number of intervals represented as a shift to create a new level, this is 1 << 3 == 8 + // intervals. + static final int SKIP_INDEX_LEVEL_SHIFT = 3; + // max number of levels + // Increasing this number, it increases how much heap we need at index time. + // we currently need (1 * 8 * 8 * 8) = 512 accumulators on heap + static final int SKIP_INDEX_MAX_LEVEL = 4; + // number of bytes to skip when skipping a level. It does not take into account the + // current interval that is being read. 
+ static final long[] SKIP_INDEX_JUMP_LENGTH_PER_LEVEL = new long[SKIP_INDEX_MAX_LEVEL]; + + static { + // Size of the interval minus read bytes (1 byte for level and 4 bytes for maxDocID) + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[0] = SKIP_INDEX_INTERVAL_BYTES - 5L; + for (int level = 1; level < SKIP_INDEX_MAX_LEVEL; level++) { + // jump from previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] = SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level - 1]; + // nodes added by new level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] += (1 << (level * SKIP_INDEX_LEVEL_SHIFT)) * SKIP_INDEX_INTERVAL_BYTES; + // remove the byte levels added in the previous level + SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level] -= (1 << ((level - 1) * SKIP_INDEX_LEVEL_SHIFT)); + } + } + + private final int skipIndexIntervalSize; + + /** Default constructor. */ public ES87TSDBDocValuesFormat() { + this(DEFAULT_SKIP_INDEX_INTERVAL_SIZE); + } + + /** Doc values fields format with specified skipIndexIntervalSize. */ + public ES87TSDBDocValuesFormat(int skipIndexIntervalSize) { super(CODEC_NAME); + if (skipIndexIntervalSize < 2) { + throw new IllegalArgumentException("skipIndexIntervalSize must be > 1, got [" + skipIndexIntervalSize + "]"); + } + this.skipIndexIntervalSize = skipIndexIntervalSize; } @Override public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { - return new ES87TSDBDocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + return new ES87TSDBDocValuesConsumer(state, skipIndexIntervalSize, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java index e3f7e829c1d2..d5c94de1c694 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesProducer.java @@ -16,6 +16,8 @@ import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -27,6 +29,7 @@ import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.DataInput; @@ -43,6 +46,8 @@ import java.io.IOException; import java.util.HashMap; import java.util.Map; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_JUMP_LENGTH_PER_LEVEL; +import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.SKIP_INDEX_MAX_LEVEL; import static org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat.TERMS_DICT_BLOCK_LZ4_SHIFT; public class ES87TSDBDocValuesProducer extends DocValuesProducer { @@ -51,6 +56,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { private final Map sorted = new HashMap<>(); private final Map sortedSets = new HashMap<>(); private final Map sortedNumerics = new 
HashMap<>(); + private final Map skippers = new HashMap<>(); private final IndexInput data; private final int maxDoc; @@ -61,7 +67,7 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { // read in the entries from the metadata file. int version = -1; - try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName, state.context)) { + try (ChecksumIndexInput in = state.directory.openChecksumInput(metaName)) { Throwable priorE = null; try { @@ -659,9 +665,8 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { i = 0; count = ords.docValueCount(); } - if (i++ == count) { - return NO_MORE_ORDS; - } + assert i < count; + i++; return ords.nextValue(); } @@ -700,6 +705,116 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { }; } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + final DocValuesSkipperEntry entry = skippers.get(field.name); + + final IndexInput input = data.slice("doc value skipper", entry.offset, entry.length); + // Prefetch the first page of data. Following pages are expected to get prefetched through + // read-ahead. + if (input.length() > 0) { + input.prefetch(0, 1); + } + // TODO: should we write to disk the actual max level for this segment? + return new DocValuesSkipper() { + final int[] minDocID = new int[SKIP_INDEX_MAX_LEVEL]; + final int[] maxDocID = new int[SKIP_INDEX_MAX_LEVEL]; + + { + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = -1; + } + } + + final long[] minValue = new long[SKIP_INDEX_MAX_LEVEL]; + final long[] maxValue = new long[SKIP_INDEX_MAX_LEVEL]; + final int[] docCount = new int[SKIP_INDEX_MAX_LEVEL]; + int levels = 1; + + @Override + public void advance(int target) throws IOException { + if (target > entry.maxDocId) { + // skipper is exhausted + for (int i = 0; i < SKIP_INDEX_MAX_LEVEL; i++) { + minDocID[i] = maxDocID[i] = DocIdSetIterator.NO_MORE_DOCS; + } + } else { + // find next interval + assert target > maxDocID[0] : "target must be bigger that current interval"; + while (true) { + levels = input.readByte(); + assert levels <= SKIP_INDEX_MAX_LEVEL && levels > 0 : "level out of range [" + levels + "]"; + boolean valid = true; + // check if current interval is competitive or we can jump to the next position + for (int level = levels - 1; level >= 0; level--) { + if ((maxDocID[level] = input.readInt()) < target) { + input.skipBytes(SKIP_INDEX_JUMP_LENGTH_PER_LEVEL[level]); // the jump for the level + valid = false; + break; + } + minDocID[level] = input.readInt(); + maxValue[level] = input.readLong(); + minValue[level] = input.readLong(); + docCount[level] = input.readInt(); + } + if (valid) { + // adjust levels + while (levels < SKIP_INDEX_MAX_LEVEL && maxDocID[levels] >= target) { + levels++; + } + break; + } + } + } + } + + @Override + public int numLevels() { + return levels; + } + + @Override + public int minDocID(int level) { + return minDocID[level]; + } + + @Override + public int maxDocID(int level) { + return maxDocID[level]; + } + + @Override + public long minValue(int level) { + return minValue[level]; + } + + @Override + public long maxValue(int level) { + return maxValue[level]; + } + + @Override + public int docCount(int level) { + return docCount[level]; + } + + @Override + public long minValue() { + return entry.minValue; + } + + @Override + public long maxValue() { + return entry.maxValue; + } + + @Override + public int docCount() { + return entry.docCount; + } + }; + } + @Override public void 
checkIntegrity() throws IOException { CodecUtil.checksumEntireFile(data); @@ -717,6 +832,9 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); } byte type = meta.readByte(); + if (info.docValuesSkipIndexType() != DocValuesSkipIndexType.NONE) { + skippers.put(info.name, readDocValueSkipperMeta(meta)); + } if (type == ES87TSDBDocValuesFormat.NUMERIC) { numerics.put(info.name, readNumeric(meta)); } else if (type == ES87TSDBDocValuesFormat.BINARY) { @@ -739,6 +857,17 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { return entry; } + private static DocValuesSkipperEntry readDocValueSkipperMeta(IndexInput meta) throws IOException { + long offset = meta.readLong(); + long length = meta.readLong(); + long maxValue = meta.readLong(); + long minValue = meta.readLong(); + int docCount = meta.readInt(); + int maxDocID = meta.readInt(); + + return new DocValuesSkipperEntry(offset, length, minValue, maxValue, docCount, maxDocID); + } + private static void readNumeric(IndexInput meta, NumericEntry entry) throws IOException { entry.docsWithFieldOffset = meta.readLong(); entry.docsWithFieldLength = meta.readLong(); @@ -1249,6 +1378,8 @@ public class ES87TSDBDocValuesProducer extends DocValuesProducer { } } + private record DocValuesSkipperEntry(long offset, long length, long minValue, long maxValue, int docCount, int maxDocId) {} + private static class NumericEntry { long docsWithFieldOffset; long docsWithFieldLength; diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java index 73dd4273a794..cf69ab086294 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/BinarizedByteVectorValues.java @@ -19,23 +19,52 @@ */ package org.elasticsearch.index.codec.vectors; -import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.search.VectorScorer; +import org.apache.lucene.util.VectorUtil; import java.io.IOException; +import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; + /** * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 */ -public abstract class BinarizedByteVectorValues extends DocIdSetIterator { +public abstract class BinarizedByteVectorValues extends ByteVectorValues { - public abstract float[] getCorrectiveTerms(); - - public abstract byte[] vectorValue() throws IOException; + public abstract float[] getCorrectiveTerms(int vectorOrd) throws IOException; /** Return the dimension of the vectors */ public abstract int dimension(); + /** Returns the centroid distance for the vector */ + public abstract float getCentroidDistance(int vectorOrd) throws IOException; + + /** Returns the vector magnitude for the vector */ + public abstract float getVectorMagnitude(int vectorOrd) throws IOException; + + /** Returns OOQ corrective factor for the given vector ordinal */ + public abstract float getOOQ(int targetOrd) throws IOException; + + /** + * Returns the norm of the target vector w the centroid corrective factor for the given vector + * ordinal + */ + public abstract float getNormOC(int targetOrd) throws IOException; + + /** + * Returns the target vector dot product the centroid corrective factor for the given vector + * ordinal + 
*/ + public abstract float getODotC(int targetOrd) throws IOException; + + /** + * @return the quantizer used to quantize the vectors + */ + public abstract BinaryQuantizer getQuantizer(); + + public abstract float[] getCentroid() throws IOException; + /** * Return the number of vectors for this field. * @@ -43,9 +72,16 @@ public abstract class BinarizedByteVectorValues extends DocIdSetIterator { */ public abstract int size(); - @Override - public final long cost() { - return size(); + int discretizedDimensions() { + return BQVectorUtils.discretize(dimension(), 64); + } + + float sqrtDimensions() { + return (float) constSqrt(dimension()); + } + + float maxX1() { + return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); } /** @@ -55,4 +91,13 @@ public abstract class BinarizedByteVectorValues extends DocIdSetIterator { * @return a {@link VectorScorer} instance or null */ public abstract VectorScorer scorer(float[] query) throws IOException; + + @Override + public abstract BinarizedByteVectorValues copy() throws IOException; + + float getCentroidDP() throws IOException { + // this only gets executed on-merge + float[] centroid = getCentroid(); + return VectorUtil.dotProduct(centroid, centroid); + } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java index cc5454ee074e..ab882c8b0464 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormat.java @@ -152,10 +152,5 @@ public class ES813FlatVectorFormat extends KnnVectorsFormat { public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java index 9491598653c4..662e4040511e 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormat.java @@ -160,11 +160,5 @@ public class ES813Int8FlatVectorFormat extends KnnVectorsFormat { public void close() throws IOException { reader.close(); } - - @Override - public long ramBytesUsed() { - return reader.ramBytesUsed(); - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java index 10a20839ab3c..4c4fd0080695 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES814ScalarQuantizedVectorsFormat.java @@ -22,18 +22,17 @@ import org.apache.lucene.codecs.lucene99.Lucene99ScalarQuantizedVectorsWriter; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; import org.apache.lucene.index.VectorSimilarityFunction; import 
org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import org.apache.lucene.util.quantization.QuantizedVectorsReader; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; import org.apache.lucene.util.quantization.ScalarQuantizer; import org.elasticsearch.simdvec.VectorScorerFactory; import org.elasticsearch.simdvec.VectorSimilarityType; @@ -246,9 +245,9 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { } @Override - public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, RandomAccessVectorValues values) + public RandomVectorScorerSupplier getRandomVectorScorerSupplier(VectorSimilarityFunction sim, KnnVectorValues values) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorerSupplier(sim, values); @@ -256,7 +255,7 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { if (factory != null) { var scorer = factory.getInt7SQVectorScorerSupplier( VectorSimilarityType.of(sim), - values.getSlice(), + qValues.getSlice(), qValues, qValues.getScalarQuantizer().getConstantMultiplier() ); @@ -269,9 +268,9 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, float[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, float[] query) throws IOException { - if (values instanceof RandomAccessQuantizedByteVectorValues qValues && values.getSlice() != null) { + if (values instanceof QuantizedByteVectorValues qValues && qValues.getSlice() != null) { // TODO: optimize int4 quantization if (qValues.getScalarQuantizer().getBits() != 7) { return delegate.getRandomVectorScorer(sim, values, query); @@ -287,7 +286,7 @@ public class ES814ScalarQuantizedVectorsFormat extends FlatVectorsFormat { } @Override - public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, RandomAccessVectorValues values, byte[] query) + public RandomVectorScorer getRandomVectorScorer(VectorSimilarityFunction sim, KnnVectorValues values, byte[] query) throws IOException { return delegate.getRandomVectorScorer(sim, values, query); } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java index 7e586e210afd..18668f4f304b 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorsFormat.java @@ -14,14 +14,15 @@ import org.apache.lucene.codecs.hnsw.FlatVectorsReader; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; import org.apache.lucene.codecs.hnsw.FlatVectorsWriter; import org.apache.lucene.codecs.lucene99.Lucene99FlatVectorsFormat; +import org.apache.lucene.index.ByteVectorValues; +import 
org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.SegmentReadState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; -import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues; +import org.apache.lucene.util.quantization.QuantizedByteVectorValues; import java.io.IOException; @@ -68,14 +69,14 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues + KnnVectorValues vectorValues ) throws IOException { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - assert randomAccessVectorValues instanceof RandomAccessQuantizedByteVectorValues == false; + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + assert byteVectorValues instanceof QuantizedByteVectorValues == false; return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(randomAccessVectorValuesBytes); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingScorerSupplier(byteVectorValues); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -84,18 +85,15 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - byte[] bytes - ) { - assert randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes; + KnnVectorValues vectorValues, + byte[] target + ) throws IOException { + assert vectorValues instanceof ByteVectorValues; assert vectorSimilarityFunction == VectorSimilarityFunction.EUCLIDEAN; - if (randomAccessVectorValues instanceof RandomAccessVectorValues.Bytes randomAccessVectorValuesBytes) { - checkDimensions(bytes.length, randomAccessVectorValuesBytes.dimension()); + if (vectorValues instanceof ByteVectorValues byteVectorValues) { + checkDimensions(target.length, byteVectorValues.dimension()); return switch (vectorSimilarityFunction) { - case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer( - randomAccessVectorValuesBytes, - bytes - ); + case DOT_PRODUCT, MAXIMUM_INNER_PRODUCT, COSINE, EUCLIDEAN -> new HammingVectorScorer(byteVectorValues, target); }; } throw new IllegalArgumentException("Unsupported vector type or similarity function"); @@ -103,10 +101,10 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { @Override public RandomVectorScorer getRandomVectorScorer( - VectorSimilarityFunction vectorSimilarityFunction, - RandomAccessVectorValues randomAccessVectorValues, - float[] floats - ) { + VectorSimilarityFunction similarityFunction, + KnnVectorValues vectorValues, + float[] target + ) throws IOException { throw new IllegalArgumentException("Unsupported vector type"); } } @@ -117,9 +115,9 @@ class 
ES815BitFlatVectorsFormat extends FlatVectorsFormat { static class HammingVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final byte[] query; - private final RandomAccessVectorValues.Bytes byteValues; + private final ByteVectorValues byteValues; - HammingVectorScorer(RandomAccessVectorValues.Bytes byteValues, byte[] query) { + HammingVectorScorer(ByteVectorValues byteValues, byte[] query) { super(byteValues); this.query = query; this.byteValues = byteValues; @@ -132,9 +130,9 @@ class ES815BitFlatVectorsFormat extends FlatVectorsFormat { } static class HammingScorerSupplier implements RandomVectorScorerSupplier { - private final RandomAccessVectorValues.Bytes byteValues, byteValues1, byteValues2; + private final ByteVectorValues byteValues, byteValues1, byteValues2; - HammingScorerSupplier(RandomAccessVectorValues.Bytes byteValues) throws IOException { + HammingScorerSupplier(ByteVectorValues byteValues) throws IOException { this.byteValues = byteValues; this.byteValues1 = byteValues.copy(); this.byteValues2 = byteValues.copy(); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java index f4d22edc6dfd..72c5da4880e7 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorer.java @@ -20,10 +20,10 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.hnsw.FlatVectorsScorer; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.simdvec.ESVectorUtil; @@ -45,9 +45,9 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { @Override public RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues + KnnVectorValues vectorValues ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues) { + if (vectorValues instanceof BinarizedByteVectorValues) { throw new UnsupportedOperationException( "getRandomVectorScorerSupplier(VectorSimilarityFunction,RandomAccessVectorValues) not implemented for binarized format" ); @@ -58,10 +58,10 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, float[] target ) throws IOException { - if (vectorValues instanceof RandomAccessBinarizedByteVectorValues binarizedVectors) { + if (vectorValues instanceof BinarizedByteVectorValues binarizedVectors) { BinaryQuantizer quantizer = binarizedVectors.getQuantizer(); float[] centroid = binarizedVectors.getCentroid(); // FIXME: precompute this once? 
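The scorer and writer hunks in this area all follow the same Lucene 10 migration: RandomAccessVectorValues is gone, the scorer entry points now take KnnVectorValues (with ByteVectorValues and FloatVectorValues as its subclasses), and per-document traversal moves off the values object onto a separate DocIndexIterator, with vectors fetched by ordinal via vectorValue(ord). A minimal sketch of that read loop, not taken from this change (the class and method names are made up for illustration; `values` stands for any FloatVectorValues):

import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.index.KnnVectorValues;
import org.apache.lucene.search.DocIdSetIterator;

import java.io.IOException;

class VectorValuesIterationSketch {
    // Visits every vector once: the iterator drives doc IDs, while vectorValue(ord) does ordinal-based access.
    static void consumeAll(FloatVectorValues values) throws IOException {
        KnnVectorValues.DocIndexIterator it = values.iterator();
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            float[] vector = values.vectorValue(it.index()); // replaces the old per-doc vectorValue()
            // ... use `vector` for document `doc` ...
        }
    }
}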
@@ -82,7 +82,7 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { @Override public RandomVectorScorer getRandomVectorScorer( VectorSimilarityFunction similarityFunction, - RandomAccessVectorValues vectorValues, + KnnVectorValues vectorValues, byte[] target ) throws IOException { return nonQuantizedDelegate.getRandomVectorScorer(similarityFunction, vectorValues, target); @@ -91,7 +91,7 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { RandomVectorScorerSupplier getRandomVectorScorerSupplier( VectorSimilarityFunction similarityFunction, ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues scoringVectors, - RandomAccessBinarizedByteVectorValues targetVectors + BinarizedByteVectorValues targetVectors ) { return new BinarizedRandomVectorScorerSupplier(scoringVectors, targetVectors, similarityFunction); } @@ -104,12 +104,12 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { /** Vector scorer supplier over binarized vector values */ static class BinarizedRandomVectorScorerSupplier implements RandomVectorScorerSupplier { private final ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; BinarizedRandomVectorScorerSupplier( ES816BinaryQuantizedVectorsWriter.OffHeapBinarizedQueryVectorValues queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { this.queryVectors = queryVectors; @@ -149,7 +149,7 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { /** Vector scorer over binarized vector values */ public static class BinarizedRandomVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer { private final BinaryQueryVector queryVector; - private final RandomAccessBinarizedByteVectorValues targetVectors; + private final BinarizedByteVectorValues targetVectors; private final VectorSimilarityFunction similarityFunction; private final float sqrtDimensions; @@ -157,7 +157,7 @@ public class ES816BinaryFlatVectorsScorer implements FlatVectorsScorer { public BinarizedRandomVectorScorer( BinaryQueryVector queryVectors, - RandomAccessBinarizedByteVectorValues targetVectors, + BinarizedByteVectorValues targetVectors, VectorSimilarityFunction similarityFunction ) { super(targetVectors); diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java index b0378fee6793..21c4a5c44938 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsReader.java @@ -36,6 +36,7 @@ import org.apache.lucene.search.VectorScorer; import org.apache.lucene.store.ChecksumIndexInput; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.Bits; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; @@ -78,7 +79,7 @@ public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { ES816BinaryQuantizedVectorsFormat.META_EXTENSION ); boolean success = false; - try 
(ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName, state.context)) { + try (ChecksumIndexInput meta = state.directory.openChecksumInput(metaFileName)) { Throwable priorE = null; try { versionMeta = CodecUtil.checkIndexHeader( @@ -102,7 +103,7 @@ public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { ES816BinaryQuantizedVectorsFormat.VECTOR_DATA_CODEC_NAME, // Quantized vectors are accessed randomly from their node ID stored in the HNSW // graph. - state.context.withRandomAccess() + state.context.withReadAdvice(ReadAdvice.RANDOM) ); success = true; } finally { @@ -357,9 +358,9 @@ public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { /** Binarized vector values holding row and quantized vector values */ protected static final class BinarizedVectorValues extends FloatVectorValues { private final FloatVectorValues rawVectorValues; - private final OffHeapBinarizedVectorValues quantizedVectorValues; + private final BinarizedByteVectorValues quantizedVectorValues; - BinarizedVectorValues(FloatVectorValues rawVectorValues, OffHeapBinarizedVectorValues quantizedVectorValues) { + BinarizedVectorValues(FloatVectorValues rawVectorValues, BinarizedByteVectorValues quantizedVectorValues) { this.rawVectorValues = rawVectorValues; this.quantizedVectorValues = quantizedVectorValues; } @@ -375,29 +376,28 @@ public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { } @Override - public float[] vectorValue() throws IOException { - return rawVectorValues.vectorValue(); + public float[] vectorValue(int ord) throws IOException { + return rawVectorValues.vectorValue(ord); } @Override - public int docID() { - return rawVectorValues.docID(); + public BinarizedVectorValues copy() throws IOException { + return new BinarizedVectorValues(rawVectorValues.copy(), quantizedVectorValues.copy()); } @Override - public int nextDoc() throws IOException { - int rawDocId = rawVectorValues.nextDoc(); - int quantizedDocId = quantizedVectorValues.nextDoc(); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public Bits getAcceptOrds(Bits acceptDocs) { + return rawVectorValues.getAcceptOrds(acceptDocs); } @Override - public int advance(int target) throws IOException { - int rawDocId = rawVectorValues.advance(target); - int quantizedDocId = quantizedVectorValues.advance(target); - assert rawDocId == quantizedDocId; - return quantizedDocId; + public int ordToDoc(int ord) { + return rawVectorValues.ordToDoc(ord); + } + + @Override + public DocIndexIterator iterator() { + return rawVectorValues.iterator(); } @Override @@ -405,7 +405,7 @@ public class ES816BinaryQuantizedVectorsReader extends FlatVectorsReader { return quantizedVectorValues.scorer(query); } - protected OffHeapBinarizedVectorValues getQuantizedVectorValues() throws IOException { + protected BinarizedByteVectorValues getQuantizedVectorValues() throws IOException { return quantizedVectorValues; } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java index 92837a8ffce4..a7774b850b64 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsWriter.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.DocsWithFieldSet; import org.apache.lucene.index.FieldInfo; import 
org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.MergeState; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Sorter; @@ -44,7 +45,6 @@ import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.VectorUtil; import org.apache.lucene.util.hnsw.CloseableRandomVectorScorerSupplier; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; import org.apache.lucene.util.hnsw.RandomVectorScorer; import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier; import org.elasticsearch.core.SuppressForbidden; @@ -354,10 +354,11 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { int queryCorrectionCount = binaryQuantizer.getSimilarity() != EUCLIDEAN ? 5 : 3; final ByteBuffer queryCorrectionsBuffer = ByteBuffer.allocate(Float.BYTES * queryCorrectionCount + Short.BYTES) .order(ByteOrder.LITTLE_ENDIAN); - for (int docV = floatVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = floatVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = floatVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write index vector BinaryQuantizer.QueryAndIndexResults r = binaryQuantizer.quantizeQueryAndIndex( - floatVectorValues.vectorValue(), + floatVectorValues.vectorValue(iterator.index()), toIndex, toQuery, centroid @@ -393,11 +394,12 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { static DocsWithFieldSet writeBinarizedVectorData(IndexOutput output, BinarizedByteVectorValues binarizedByteVectorValues) throws IOException { DocsWithFieldSet docsWithField = new DocsWithFieldSet(); - for (int docV = binarizedByteVectorValues.nextDoc(); docV != NO_MORE_DOCS; docV = binarizedByteVectorValues.nextDoc()) { + KnnVectorValues.DocIndexIterator iterator = binarizedByteVectorValues.iterator(); + for (int docV = iterator.nextDoc(); docV != NO_MORE_DOCS; docV = iterator.nextDoc()) { // write vector - byte[] binaryValue = binarizedByteVectorValues.vectorValue(); + byte[] binaryValue = binarizedByteVectorValues.vectorValue(iterator.index()); output.writeBytes(binaryValue, binaryValue.length); - float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(); + float[] corrections = binarizedByteVectorValues.getCorrectiveTerms(iterator.index()); for (int i = 0; i < corrections.length; i++) { output.writeInt(Float.floatToIntBits(corrections[i])); } @@ -598,8 +600,9 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { if (vectorValues == null) { continue; } - for (int doc = vectorValues.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = vectorValues.nextDoc()) { - float[] vector = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) { + float[] vector = vectorValues.vectorValue(iterator.index()); // TODO Panama sum for (int j = 0; j < vector.length; j++) { centroid[j] += vector[j]; @@ -827,23 +830,31 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { private final float[] centroid; private final FloatVectorValues values; private final BinaryQuantizer quantizer; - private int lastDoc; + private int lastOrd = -1; BinarizedFloatVectorValues(FloatVectorValues delegate, BinaryQuantizer 
quantizer, float[] centroid) { this.values = delegate; this.quantizer = quantizer; this.binarized = new byte[BQVectorUtils.discretize(delegate.dimension(), 64) / 8]; this.centroid = centroid; - lastDoc = -1; } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int ord) { + if (ord != lastOrd) { + throw new IllegalStateException( + "attempt to retrieve corrective terms for different ord " + ord + " than the quantization was done for: " + lastOrd + ); + } return corrections; } @Override - public byte[] vectorValue() throws IOException { + public byte[] vectorValue(int ord) throws IOException { + if (ord != lastOrd) { + binarize(ord); + lastOrd = ord; + } return binarized; } @@ -852,57 +863,77 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { return values.dimension(); } + @Override + public float getCentroidDistance(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getVectorMagnitude(int vectorOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getOOQ(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getNormOC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public float getODotC(int targetOrd) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public BinaryQuantizer getQuantizer() { + throw new UnsupportedOperationException(); + } + + @Override + public float[] getCentroid() throws IOException { + return centroid; + } + @Override public int size() { return values.size(); } - @Override - public int docID() { - return values.docID(); - } - - @Override - public int nextDoc() throws IOException { - int doc = values.nextDoc(); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; - } - - @Override - public int advance(int target) throws IOException { - int doc = values.advance(target); - if (doc != NO_MORE_DOCS) { - binarize(); - } - lastDoc = doc; - return doc; - } - @Override public VectorScorer scorer(float[] target) throws IOException { throw new UnsupportedOperationException(); } - private void binarize() throws IOException { - if (lastDoc == docID()) return; - corrections = quantizer.quantizeForIndex(values.vectorValue(), binarized, centroid); + @Override + public BinarizedByteVectorValues copy() throws IOException { + return new BinarizedFloatVectorValues(values.copy(), quantizer, centroid); + } + + private void binarize(int ord) throws IOException { + corrections = quantizer.quantizeForIndex(values.vectorValue(ord), binarized, centroid); + } + + @Override + public DocIndexIterator iterator() { + return values.iterator(); + } + + @Override + public int ordToDoc(int ord) { + return values.ordToDoc(ord); } } static class BinarizedCloseableRandomVectorScorerSupplier implements CloseableRandomVectorScorerSupplier { private final RandomVectorScorerSupplier supplier; - private final RandomAccessVectorValues vectorValues; + private final KnnVectorValues vectorValues; private final Closeable onClose; - BinarizedCloseableRandomVectorScorerSupplier( - RandomVectorScorerSupplier supplier, - RandomAccessVectorValues vectorValues, - Closeable onClose - ) { + BinarizedCloseableRandomVectorScorerSupplier(RandomVectorScorerSupplier supplier, KnnVectorValues vectorValues, Closeable onClose) { this.supplier = supplier; this.onClose = onClose; 
this.vectorValues = vectorValues; @@ -932,7 +963,6 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { static final class NormalizedFloatVectorValues extends FloatVectorValues { private final FloatVectorValues values; private final float[] normalizedVector; - int curDoc = -1; NormalizedFloatVectorValues(FloatVectorValues values) { this.values = values; @@ -950,38 +980,25 @@ public class ES816BinaryQuantizedVectorsWriter extends FlatVectorsWriter { } @Override - public float[] vectorValue() { + public int ordToDoc(int ord) { + return values.ordToDoc(ord); + } + + @Override + public float[] vectorValue(int ord) throws IOException { + System.arraycopy(values.vectorValue(ord), 0, normalizedVector, 0, normalizedVector.length); + VectorUtil.l2normalize(normalizedVector); return normalizedVector; } @Override - public VectorScorer scorer(float[] query) { - throw new UnsupportedOperationException(); + public DocIndexIterator iterator() { + return values.iterator(); } @Override - public int docID() { - return values.docID(); - } - - @Override - public int nextDoc() throws IOException { - curDoc = values.nextDoc(); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; - } - - @Override - public int advance(int target) throws IOException { - curDoc = values.advance(target); - if (curDoc != NO_MORE_DOCS) { - System.arraycopy(values.vectorValue(), 0, normalizedVector, 0, normalizedVector.length); - VectorUtil.l2normalize(normalizedVector); - } - return curDoc; + public NormalizedFloatVectorValues copy() throws IOException { + return new NormalizedFloatVectorValues(values.copy()); } } } diff --git a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java index 628480e273b3..e7d818bb752d 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/codec/vectors/OffHeapBinarizedVectorValues.java @@ -37,7 +37,7 @@ import static org.apache.lucene.index.VectorSimilarityFunction.EUCLIDEAN; import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; /** Binarized vector values loaded from off-heap */ -public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues implements RandomAccessBinarizedByteVectorValues { +public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorValues { protected final int dimension; protected final int size; @@ -131,7 +131,12 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa } @Override - public float[] getCorrectiveTerms() { + public float[] getCorrectiveTerms(int targetOrd) throws IOException { + if (lastOrd == targetOrd) { + return correctiveValues; + } + slice.seek(((long) targetOrd * byteSize) + numBytes); + slice.readFloats(correctiveValues, 0, correctionsCount); return correctiveValues; } @@ -195,11 +200,6 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa return centroid; } - @Override - public IndexInput getSlice() { - return slice; - } - @Override public int getVectorByteLength() { return numBytes; @@ -252,8 +252,6 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa /** Dense off-heap binarized vector values */ public static class 
DenseOffHeapVectorValues extends OffHeapBinarizedVectorValues { - private int doc = -1; - public DenseOffHeapVectorValues( int dimension, int size, @@ -267,30 +265,6 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa super(dimension, size, centroid, centroidDp, binaryQuantizer, similarityFunction, vectorsScorer, slice); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(doc); - } - - @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 1); - } - - @Override - public int advance(int target) { - assert docID() < target; - if (target >= size) { - return doc = NO_MORE_DOCS; - } - return doc = target; - } - @Override public DenseOffHeapVectorValues copy() throws IOException { return new DenseOffHeapVectorValues( @@ -313,19 +287,25 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa @Override public VectorScorer scorer(float[] target) throws IOException { DenseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.doc); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } + + @Override + public DocIndexIterator iterator() { + return createDenseIterator(); + } } /** Sparse off-heap binarized vector values */ @@ -355,27 +335,6 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa this.disi = configuration.getIndexedDISI(dataIn); } - @Override - public byte[] vectorValue() throws IOException { - return vectorValue(disi.index()); - } - - @Override - public int docID() { - return disi.docID(); - } - - @Override - public int nextDoc() throws IOException { - return disi.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - assert docID() < target; - return disi.advance(target); - } - @Override public SparseOffHeapVectorValues copy() throws IOException { return new SparseOffHeapVectorValues( @@ -415,19 +374,25 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa }; } + @Override + public DocIndexIterator iterator() { + return IndexedDISI.asDocIndexIterator(disi); + } + @Override public VectorScorer scorer(float[] target) throws IOException { SparseOffHeapVectorValues copy = copy(); + DocIndexIterator iterator = copy.iterator(); RandomVectorScorer scorer = vectorsScorer.getRandomVectorScorer(similarityFunction, copy, target); return new VectorScorer() { @Override public float score() throws IOException { - return scorer.score(copy.disi.index()); + return scorer.score(iterator.index()); } @Override public DocIdSetIterator iterator() { - return copy; + return iterator; } }; } @@ -441,23 +406,8 @@ public abstract class OffHeapBinarizedVectorValues extends BinarizedByteVectorVa } @Override - public int docID() { - return doc; - } - - @Override - public int nextDoc() { - return advance(doc + 1); - } - - @Override - public int advance(int target) { - return doc = NO_MORE_DOCS; - } - - @Override - public byte[] vectorValue() { - throw new UnsupportedOperationException(); + public DocIndexIterator iterator() { + return createDenseIterator(); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java b/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java deleted file mode 100644 index 5163baf617c2..000000000000 --- a/server/src/main/java/org/elasticsearch/index/codec/vectors/RandomAccessBinarizedByteVectorValues.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * @notice - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * Modifications copyright (C) 2024 Elasticsearch B.V. - */ -package org.elasticsearch.index.codec.vectors; - -import org.apache.lucene.util.VectorUtil; -import org.apache.lucene.util.hnsw.RandomAccessVectorValues; - -import java.io.IOException; - -import static org.elasticsearch.index.codec.vectors.BQVectorUtils.constSqrt; - -/** - * Copied from Lucene, replace with Lucene's implementation sometime after Lucene 10 - */ -public interface RandomAccessBinarizedByteVectorValues extends RandomAccessVectorValues.Bytes { - /** Returns the centroid distance for the vector */ - float getCentroidDistance(int vectorOrd) throws IOException; - - /** Returns the vector magnitude for the vector */ - float getVectorMagnitude(int vectorOrd) throws IOException; - - /** Returns OOQ corrective factor for the given vector ordinal */ - float getOOQ(int targetOrd) throws IOException; - - /** - * Returns the norm of the target vector w the centroid corrective factor for the given vector - * ordinal - */ - float getNormOC(int targetOrd) throws IOException; - - /** - * Returns the target vector dot product the centroid corrective factor for the given vector - * ordinal - */ - float getODotC(int targetOrd) throws IOException; - - /** - * @return the quantizer used to quantize the vectors - */ - BinaryQuantizer getQuantizer(); - - default int discretizedDimensions() { - return BQVectorUtils.discretize(dimension(), 64); - } - - default float sqrtDimensions() { - return (float) constSqrt(dimension()); - } - - default float maxX1() { - return (float) (1.9 / constSqrt(discretizedDimensions() - 1.0)); - } - - /** - * @return coarse grained centroids for the vectors - */ - float[] getCentroid() throws IOException; - - @Override - RandomAccessBinarizedByteVectorValues copy() throws IOException; - - default float getCentroidDP() throws IOException { - // this only gets executed on-merge - float[] centroid = getCentroid(); - return VectorUtil.dotProduct(centroid, centroid); - } -} diff --git a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java index 05cc6d148be5..e44b344d3b28 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java +++ 
b/server/src/main/java/org/elasticsearch/index/engine/LuceneChangesSnapshot.java @@ -119,7 +119,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { this.parallelArray = new ParallelArray(this.searchBatchSize); this.indexVersionCreated = indexVersionCreated; final TopDocs topDocs = searchOperations(null, accessStats); - this.totalHits = Math.toIntExact(topDocs.totalHits.value); + this.totalHits = Math.toIntExact(topDocs.totalHits.value()); this.scoreDocs = topDocs.scoreDocs; fillParallelArray(scoreDocs, parallelArray); } @@ -341,7 +341,7 @@ final class LuceneChangesSnapshot implements Translog.Snapshot { assert storedFieldsReaderOrd == leaf.ord : storedFieldsReaderOrd + " != " + leaf.ord; storedFieldsReader.document(segmentDocID, fields); } else { - leaf.reader().document(segmentDocID, fields); + leaf.reader().storedFields().document(segmentDocID, fields); } final Translog.Operation op; diff --git a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java index 18b5ba69ca32..3e99818d1827 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java +++ b/server/src/main/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicy.java @@ -13,6 +13,7 @@ import org.apache.lucene.codecs.DocValuesProducer; import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.CodecReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FilterCodecReader; import org.apache.lucene.index.FilterNumericDocValues; @@ -188,6 +189,11 @@ final class RecoverySourcePruneMergePolicy extends OneMergeWrappingMergePolicy { return in.getSortedSet(field); } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return in.getSkipper(field); + } + @Override public void checkIntegrity() throws IOException { in.checkIntegrity(); diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java index c7acd730fadb..0f772b49bf92 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/index/engine/TranslogDirectoryReader.java @@ -13,10 +13,11 @@ import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.ImpactsEnum; import org.apache.lucene.index.IndexCommit; @@ -152,6 +153,7 @@ final class TranslogDirectoryReader extends DirectoryReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -171,6 +173,7 @@ final class TranslogDirectoryReader extends DirectoryReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -190,6 +193,7 @@ final class TranslogDirectoryReader extends 
DirectoryReader { false, IndexOptions.DOCS, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -346,6 +350,11 @@ final class TranslogDirectoryReader extends DirectoryReader { return getDelegate().getNormValues(field); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return getDelegate().getDocValuesSkipper(field); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { return getDelegate().getFloatVectorValues(field); @@ -389,11 +398,6 @@ final class TranslogDirectoryReader extends DirectoryReader { return getDelegate().getMetaData(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - return getDelegate().getTermVectors(docID); - } - @Override public TermVectors termVectors() throws IOException { return getDelegate().termVectors(); @@ -429,11 +433,6 @@ final class TranslogDirectoryReader extends DirectoryReader { return 1; } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - private void readStoredFieldsDirectly(StoredFieldVisitor visitor) throws IOException { if (visitor.needsField(FAKE_SOURCE_FIELD) == StoredFieldVisitor.Status.YES) { BytesReference sourceBytes = operation.source(); diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index 84e85f3ddf2b..d4e34181b876 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -52,12 +52,7 @@ final class GlobalOrdinalMapping extends SortedSetDocValues { @Override public long nextOrd() throws IOException { - long segmentOrd = values.nextOrd(); - if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return getGlobalOrd(segmentOrd); - } + return getGlobalOrd(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index 0439383ccbd0..0f72e491d811 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -40,13 +40,13 @@ public class MultiOrdinals extends Ordinals { float acceptableOverheadRatio ) { int bitsPerOrd = PackedInts.bitsRequired(numOrds); - bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue; + bitsPerOrd = PackedInts.fastestFormatAndBits(numDocsWithValue, bitsPerOrd, acceptableOverheadRatio).bitsPerValue(); // Compute the worst-case number of bits per value for offsets in the worst case, eg. 
if no docs have a value at the // beginning of the block and all docs have one at the end of the block final float avgValuesPerDoc = (float) numDocsWithValue / maxDoc; final int maxDelta = (int) Math.ceil(OFFSETS_PAGE_SIZE * (1 - avgValuesPerDoc) * avgValuesPerDoc); int bitsPerOffset = PackedInts.bitsRequired(maxDelta) + 1; // +1 because of the sign - bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue; + bitsPerOffset = PackedInts.fastestFormatAndBits(maxDoc, bitsPerOffset, acceptableOverheadRatio).bitsPerValue(); final long expectedMultiSizeInBytes = (long) numDocsWithValue * bitsPerOrd + (long) maxDoc * bitsPerOffset; final long expectedSingleSizeInBytes = (long) maxDoc * bitsPerOrd; @@ -153,6 +153,7 @@ public class MultiOrdinals extends Ordinals { private long currentOffset; private long currentEndOffset; + private int count; MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = ordinals.valueCount; @@ -170,21 +171,19 @@ public class MultiOrdinals extends Ordinals { public boolean advanceExact(int docId) { currentOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; currentEndOffset = endOffsets.get(docId); + count = Math.toIntExact(currentEndOffset - currentOffset); return currentOffset != currentEndOffset; } @Override public long nextOrd() { - if (currentOffset == currentEndOffset) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - return ords.get(currentOffset++); - } + assert currentOffset != currentEndOffset; + return ords.get(currentOffset++); } @Override public int docValueCount() { - return Math.toIntExact(currentEndOffset - currentOffset); + return count; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java index 57572dea8ac0..d05f0e477db0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DateFieldMapper.java @@ -11,6 +11,7 @@ package org.elasticsearch.index.mapper; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.apache.lucene.document.Field; import org.apache.lucene.document.LongField; import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; @@ -687,7 +688,7 @@ public final class DateFieldMapper extends FieldMapper { long pivotLong = resolution.convert(pivotTime); // As we already apply boost in AbstractQueryBuilder::toQuery, we always passing a boost of 1.0 to distanceFeatureQuery if (isIndexed()) { - return LongPoint.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); + return LongField.newDistanceFeatureQuery(name(), 1.0f, originLong, pivotLong); } else { return new LongScriptFieldDistanceFeatureQuery( new Script(""), @@ -959,7 +960,7 @@ public final class DateFieldMapper extends FieldMapper { } if (indexed && hasDocValues) { - context.doc().add(new LongField(fieldType().name(), timestamp)); + context.doc().add(new LongField(fieldType().name(), timestamp, Field.Store.NO)); } else if (hasDocValues) { context.doc().add(new SortedNumericDocValuesField(fieldType().name(), timestamp)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java index 494005ce12cb..d37f6c51d288 100644 --- 
a/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentLeafReader.java @@ -11,10 +11,11 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.DocValuesSkipIndexType; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -147,11 +148,6 @@ class DocumentLeafReader extends LeafReader { return new FieldInfos(new FieldInfo[0]); } - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { return new StoredFields() { @@ -203,6 +199,11 @@ class DocumentLeafReader extends LeafReader { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String s) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -233,11 +234,6 @@ class DocumentLeafReader extends LeafReader { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docID) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public int numDocs() { throw new UnsupportedOperationException(); @@ -284,6 +280,7 @@ class DocumentLeafReader extends LeafReader { false, IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, Collections.emptyMap(), 0, @@ -484,9 +481,7 @@ class DocumentLeafReader extends LeafReader { @Override public long nextOrd() { i++; - if (i >= values.size()) { - return NO_MORE_ORDS; - } + assert i < values.size(); return i; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java index b9d89462c346..8e418f45ddb3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IdFieldMapper.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map; /** @@ -97,13 +98,13 @@ public abstract class IdFieldMapper extends MetadataFieldMapper { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(v -> { + List bytesRefs = values.stream().map(v -> { Object idObject = v; if (idObject instanceof BytesRef) { idObject = ((BytesRef) idObject).utf8ToString(); } return Uid.encodeId(idObject.toString()); - }).toArray(BytesRef[]::new); + }).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java index 6900dcd77391..8114167c0248 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java 
+++ b/server/src/main/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtil.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.ArrayList; @@ -76,8 +75,8 @@ public class IpPrefixAutomatonUtil { } else { result = Automata.makeAnyBinary(); } - result = MinimizationOperations.minimize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - return new CompiledAutomaton(result, null, false, 0, true); + result = Operations.determinize(result, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return new CompiledAutomaton(result, false, false, true); } private static Automaton getIpv6Automaton(String ipPrefix) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 1ff9fd2f699c..802680e7f373 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; @@ -491,7 +490,7 @@ public final class KeywordFieldMapper extends FieldMapper { if (isIndexed()) { return super.termsQuery(values, context); } else { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + Collection bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } } @@ -597,7 +596,6 @@ public final class KeywordFieldMapper extends FieldMapper { ? 
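The hunks above drop MinimizationOperations, which Lucene 10 removed; automata are now determinized where needed and compiled directly. A sketch of the replacement pattern for a prefix automaton (method name and prefix are illustrative):

    static CompiledAutomaton compilePrefix(String prefix) {
        Automaton a = Operations.concatenate(Automata.makeString(prefix), Automata.makeAnyString());
        a = Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); // replaces minimize(...)
        return new CompiledAutomaton(a);
    }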
AutomatonQueries.caseInsensitivePrefix(prefix) : Operations.concatenate(Automata.makeString(prefix), Automata.makeAnyString()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a, true, true); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java index f1924fd04f3f..c6f1b490a2be 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LegacyTypeFieldMapper.java @@ -11,7 +11,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; @@ -70,7 +69,7 @@ public class LegacyTypeFieldMapper extends MetadataFieldMapper { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + var bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return SortedSetDocValuesField.newSlowSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 2e815554dc82..3608e8ab261c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -11,6 +11,7 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.DoubleField; import org.apache.lucene.document.DoublePoint; +import org.apache.lucene.document.Field; import org.apache.lucene.document.FloatField; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.IntField; @@ -589,7 +590,7 @@ public class NumberFieldMapper extends FieldMapper { public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final float f = value.floatValue(); if (indexed && docValued) { - document.add(new FloatField(name, f)); + document.add(new FloatField(name, f, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(f))); } else if (indexed) { @@ -743,7 +744,7 @@ public class NumberFieldMapper extends FieldMapper { public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final double d = value.doubleValue(); if (indexed && docValued) { - document.add(new DoubleField(name, d)); + document.add(new DoubleField(name, d, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(d))); } else if (indexed) { @@ -1179,7 +1180,7 @@ public class NumberFieldMapper extends FieldMapper { public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final int i = value.intValue(); if (indexed && docValued) { - document.add(new IntField(name, i)); + document.add(new IntField(name, i, Field.Store.NO)); } else if (docValued) { document.add(new 
SortedNumericDocValuesField(name, i)); } else if (indexed) { @@ -1330,7 +1331,7 @@ public class NumberFieldMapper extends FieldMapper { public void addFields(LuceneDocument document, String name, Number value, boolean indexed, boolean docValued, boolean stored) { final long l = value.longValue(); if (indexed && docValued) { - document.add(new LongField(name, l)); + document.add(new LongField(name, l, Field.Store.NO)); } else if (docValued) { document.add(new SortedNumericDocValuesField(name, l)); } else if (indexed) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java index 9ea16933f7ab..ceb96b87a098 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/StringFieldType.java @@ -101,9 +101,7 @@ public abstract class StringFieldType extends TermBasedFieldType { failIfNotIndexed(); Term prefix = new Term(name(), indexedValueForSearch(value)); if (caseInsensitive) { - return method == null - ? new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new CaseInsensitivePrefixQuery(prefix, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitivePrefixQuery(prefix, false) : new CaseInsensitivePrefixQuery(prefix, false, method); } return method == null ? new PrefixQuery(prefix) : new PrefixQuery(prefix, method); } @@ -170,9 +168,7 @@ public abstract class StringFieldType extends TermBasedFieldType { term = new Term(name(), indexedValueForSearch(value)); } if (caseInsensitive) { - return method == null - ? new CaseInsensitiveWildcardQuery(term) - : new CaseInsensitiveWildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + return method == null ? new CaseInsensitiveWildcardQuery(term) : new CaseInsensitiveWildcardQuery(term, false, method); } return method == null ? 
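The NumberFieldMapper hunks adapt to the Lucene 10 constructors of IntField, LongField, FloatField and DoubleField, which now take an explicit Field.Store argument. A small illustrative sketch (field names and values are hypothetical):

    Document doc = new Document();
    doc.add(new LongField("timestamp", 1723456789000L, Field.Store.NO)); // index + doc values only
    doc.add(new IntField("count", 42, Field.Store.YES));                 // additionally keep a stored copy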
new WildcardQuery(term) : new WildcardQuery(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, method); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java index 674a016264c3..e2ff9cc7ea63 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TermBasedFieldType.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.index.query.SearchExecutionContext; import java.util.Collection; +import java.util.List; import java.util.Map; /** Base {@link MappedFieldType} implementation for a field that is indexed @@ -69,7 +70,7 @@ public abstract class TermBasedFieldType extends SimpleMappedFieldType { @Override public Query termsQuery(Collection values, SearchExecutionContext context) { failIfNotIndexed(); - BytesRef[] bytesRefs = values.stream().map(this::indexedValueForSearch).toArray(BytesRef[]::new); + List bytesRefs = values.stream().map(this::indexedValueForSearch).toList(); return new TermInSetQuery(name(), bytesRefs); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java index 642539fbbc2f..3f77edc81960 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TextFieldMapper.java @@ -598,8 +598,8 @@ public final class TextFieldMapper extends FieldMapper { } Automaton automaton = Operations.concatenate(automata); AutomatonQuery query = method == null - ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false) - : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method); + ? 
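TermInSetQuery and SortedSetDocValuesField.newSlowSetQuery now accept a Collection of BytesRef rather than an array, which is why the mappers above collect to a List instead of calling toArray. A minimal sketch (field name and values are illustrative):

    List<BytesRef> terms = Stream.of("alpha", "beta", "gamma").map(BytesRef::new).toList();
    Query query = new TermInSetQuery("tags", terms);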
new AutomatonQuery(new Term(name(), value + "*"), automaton, false) + : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method); return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD) .add(new TermQuery(new Term(parentField.name(), value)), BooleanClause.Occur.SHOULD) .build(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java index ac1de94ea7a7..93a2157b2338 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/FlattenedFieldMapper.java @@ -28,10 +28,10 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.SortField; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -394,7 +394,6 @@ public final class FlattenedFieldMapper extends FieldMapper { a = Operations.concatenate(a, Automata.makeAnyString()); } assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); CompiledAutomaton automaton = new CompiledAutomaton(a); if (searchAfter != null) { @@ -483,6 +482,11 @@ public final class FlattenedFieldMapper extends FieldMapper { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java index b94ea67c8de8..b29f093e3a21 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldData.java @@ -205,12 +205,8 @@ public class KeyedFlattenedLeafFieldData implements LeafOrdinalsFieldData { } long ord = delegate.nextOrd(); - if (ord != NO_MORE_ORDS && ord <= maxOrd) { - assert ord >= minOrd; - return mapOrd(ord); - } else { - return NO_MORE_ORDS; - } + assert ord <= maxOrd; + return mapOrd(ord); } @Override @@ -223,9 +219,9 @@ public class KeyedFlattenedLeafFieldData implements LeafOrdinalsFieldData { if (delegate.advanceExact(target)) { int count = 0; - while (true) { + for (int i = 0; i < delegate.docValueCount(); i++) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } if (ord >= minOrd) { @@ -246,7 +242,7 @@ public class KeyedFlattenedLeafFieldData implements LeafOrdinalsFieldData { while (true) { long ord = delegate.nextOrd(); - if (ord == NO_MORE_ORDS || ord > maxOrd) { + if (ord > maxOrd) { break; } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java 
b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java index e8da3b72ae7c..04069333deb1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValues.java @@ -45,24 +45,13 @@ public class DenormalizedCosineFloatVectorValues extends FloatVectorValues { } @Override - public float[] vectorValue() throws IOException { - // Lazy load vectors as we may iterate but not actually require the vector - return vectorValue(in.docID()); + public DocIndexIterator iterator() { + return in.iterator(); } @Override - public int docID() { - return in.docID(); - } - - @Override - public int nextDoc() throws IOException { - return in.nextDoc(); - } - - @Override - public int advance(int target) throws IOException { - return in.advance(target); + public FloatVectorValues copy() throws IOException { + return in.copy(); } @Override @@ -74,22 +63,24 @@ public class DenormalizedCosineFloatVectorValues extends FloatVectorValues { return magnitude; } - private float[] vectorValue(int docId) throws IOException { + @Override + public float[] vectorValue(int ord) throws IOException { + int docId = ordToDoc(ord); if (docId != this.docId) { this.docId = docId; hasMagnitude = decodedMagnitude(docId); // We should only copy and transform if we have a stored a non-unit length magnitude if (hasMagnitude) { - System.arraycopy(in.vectorValue(), 0, vector, 0, dimension()); + System.arraycopy(in.vectorValue(ord), 0, vector, 0, dimension()); for (int i = 0; i < vector.length; i++) { vector[i] *= magnitude; } return vector; } else { - return in.vectorValue(); + return in.vectorValue(ord); } } else { - return hasMagnitude ? vector : in.vectorValue(); + return hasMagnitude ? 
vector : in.vectorValue(ord); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java index a023837a0efb..809532c0e8f5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/vectors/DenseVectorFieldMapper.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentReadState; @@ -2309,6 +2310,7 @@ public class DenseVectorFieldMapper extends FieldMapper { private ByteVectorValues byteVectorValues; private boolean hasValue; private boolean hasMagnitude; + private int ord; private final IndexVersion indexCreatedVersion; private final VectorSimilarity vectorSimilarity; @@ -2326,16 +2328,20 @@ public class DenseVectorFieldMapper extends FieldMapper { if (indexCreatedVersion.onOrAfter(NORMALIZE_COSINE) && VectorSimilarity.COSINE.equals(vectorSimilarity)) { magnitudeReader = leafReader.getNumericDocValues(fullPath() + COSINE_MAGNITUDE_FIELD_SUFFIX); } + KnnVectorValues.DocIndexIterator iterator = values.iterator(); return docId -> { - hasValue = docId == values.advance(docId); + hasValue = docId == iterator.advance(docId); hasMagnitude = hasValue && magnitudeReader != null && magnitudeReader.advanceExact(docId); + ord = iterator.index(); return hasValue; }; } byteVectorValues = leafReader.getByteVectorValues(fullPath()); if (byteVectorValues != null) { + KnnVectorValues.DocIndexIterator iterator = byteVectorValues.iterator(); return docId -> { - hasValue = docId == byteVectorValues.advance(docId); + hasValue = docId == iterator.advance(docId); + ord = iterator.index(); return hasValue; }; } @@ -2358,7 +2364,7 @@ public class DenseVectorFieldMapper extends FieldMapper { } b.startArray(leafName()); if (values != null) { - for (float v : values.vectorValue()) { + for (float v : values.vectorValue(ord)) { if (hasMagnitude) { b.value(v * magnitude); } else { @@ -2366,7 +2372,7 @@ public class DenseVectorFieldMapper extends FieldMapper { } } } else if (byteVectorValues != null) { - byte[] vectorValue = byteVectorValues.vectorValue(); + byte[] vectorValue = byteVectorValues.vectorValue(ord); for (byte value : vectorValue) { b.value(value); } diff --git a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java index 16aada4066f7..1560004b1378 100644 --- a/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/CombinedFieldsQueryBuilder.java @@ -412,8 +412,8 @@ public final class CombinedFieldsQueryBuilder extends AbstractQueryBuilder clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -204,7 +204,7 @@ public abstract class IntervalBuilder { TokenStream ts = it.next(); IntervalsSource 
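The vector-values hunks above follow the Lucene 10 KnnVectorValues API: documents are visited through a DocIndexIterator and vectors are fetched by ordinal via vectorValue(ord) rather than reading the value at the iterator's current position. A sketch of a consumer (method and field name are illustrative):

    static void readVectors(LeafReader leafReader) throws IOException {
        FloatVectorValues values = leafReader.getFloatVectorValues("embedding");
        if (values == null) {
            return;
        }
        KnnVectorValues.DocIndexIterator it = values.iterator();
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            float[] vector = values.vectorValue(it.index()); // index() is the ordinal of the current doc
            // ... consume vector ...
        }
    }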
phrase = combineSources(analyzeTerms(ts), 0, true); if (paths.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } paths.add(phrase); } diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 55642ccf0275..626875c75a5f 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -16,8 +16,8 @@ import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Query; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopFieldCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopFieldCollectorManager; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.search.TotalHitCountCollector; import org.apache.lucene.search.TotalHits; import org.apache.lucene.search.Weight; @@ -443,12 +443,12 @@ public class NestedQueryBuilder extends AbstractQueryBuilder TopDocsCollector topDocsCollector; MaxScoreCollector maxScoreCollector = null; if (sort() != null) { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE); + topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector(); if (trackScores()) { maxScoreCollector = new MaxScoreCollector(); } } else { - topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); + topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector(); maxScoreCollector = new MaxScoreCollector(); } intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java index 6072a81691ff..30921d22a8d8 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpFlag.java @@ -10,9 +10,12 @@ package org.elasticsearch.index.query; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.common.Strings; +import org.elasticsearch.core.UpdateForV10; import java.util.Locale; +import static org.apache.lucene.util.automaton.RegExp.DEPRECATED_COMPLEMENT; + /** * Regular expression syntax flags. Each flag represents optional syntax support in the regular expression: *
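The NestedQueryBuilder hunk above switches from the removed TopScoreDocCollector.create/TopFieldCollector.create factories to the Lucene 10 collector managers. A minimal sketch of the manager-based search path, assuming an IndexSearcher searcher and a Query query are in scope:

    TopScoreDocCollectorManager manager = new TopScoreDocCollectorManager(10, null, Integer.MAX_VALUE);
    TopDocs topDocs = searcher.search(query, manager);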

    @@ -37,8 +40,11 @@ public enum RegexpFlag { /** * Enables complement expression of the form: {@code ~<expression>} + * We use the deprecated support in Lucene 10. Will be removed in Lucene 11 + * https://github.com/elastic/elasticsearch/issues/113465 */ - COMPLEMENT(RegExp.COMPLEMENT), + @UpdateForV10(owner = UpdateForV10.Owner.SEARCH_FOUNDATIONS) + COMPLEMENT(DEPRECATED_COMPLEMENT), /** * Enables empty language expression: {@code #} @@ -63,7 +69,7 @@ public enum RegexpFlag { /** * Enables all available option flags */ - ALL(RegExp.ALL); + ALL(RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); final int value; diff --git a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java index dc439fab58ff..461dc6632243 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RegexpQueryBuilder.java @@ -280,7 +280,9 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder int matchFlagsValue = caseInsensitive ? RegExp.ASCII_CASE_INSENSITIVE : 0; Query query = null; // For BWC we mask irrelevant bits (RegExp changed ALL from 0xffff to 0xff) - int sanitisedSyntaxFlag = syntaxFlagsValue & RegExp.ALL; + // We need to preserve the DEPRECATED_COMPLEMENT for now though + int deprecatedComplementFlag = syntaxFlagsValue & RegExp.DEPRECATED_COMPLEMENT; + int sanitisedSyntaxFlag = syntaxFlagsValue & (RegExp.ALL | deprecatedComplementFlag); MappedFieldType fieldType = context.getFieldType(fieldName); if (fieldType != null) { diff --git a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java index c96771978bd4..8d3fd1d92e1e 100644 --- a/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/ScriptQueryBuilder.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.ElasticsearchException; @@ -184,7 +185,7 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final FilterScript leafScript = filterScript.newInstance(new DocValuesDocReader(lookup, context)); TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -201,7 +202,8 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder return 1000f; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java index 81afdf0ebe5e..a6116ccf2c49 100644 --- a/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java +++ 
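The RegexpFlag and RegexpQueryBuilder hunks above keep '~' working by opting into RegExp.DEPRECATED_COMPLEMENT, since Lucene 10 no longer includes complement in RegExp.ALL. An illustrative sketch (the pattern is hypothetical):

    RegExp regExp = new RegExp("~(foo.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT);
    Automaton automaton = regExp.toAutomaton();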
b/server/src/main/java/org/elasticsearch/index/query/TermsSetQueryBuilder.java @@ -12,7 +12,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.sandbox.search.CoveringQuery; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DoubleValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LongValues; @@ -273,8 +272,8 @@ public final class TermsSetQueryBuilder extends AbstractQueryBuilder BooleanQuery.getMaxClauseCount()) { - throw new BooleanQuery.TooManyClauses(); + if (values.size() > IndexSearcher.getMaxClauseCount()) { + throw new IndexSearcher.TooManyClauses(); } List queries = createTermQueries(context); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java index 528f0bd6dae0..1327721a8842 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/ClientScrollableHitSource.java @@ -149,7 +149,7 @@ public class ClientScrollableHitSource extends ScrollableHitSource { } hits = unmodifiableList(hits); } - long total = response.getHits().getTotalHits().value; + long total = response.getHits().getTotalHits().value(); return new Response(response.isTimedOut(), failures, total, hits, response.getScrollId()); } diff --git a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java index 505c20f64209..5f135c674ba1 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MatchQueryParser.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostAttribute; import org.apache.lucene.search.FuzzyQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MultiTermQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.QueryBuilder; @@ -690,7 +691,7 @@ public class MatchQueryParser { List clauses = new ArrayList<>(); int[] articulationPoints = graph.articulationPoints(); int lastState = 0; - int maxClauseCount = BooleanQuery.getMaxClauseCount(); + int maxClauseCount = IndexSearcher.getMaxClauseCount(); for (int i = 0; i <= articulationPoints.length; i++) { int start = lastState; int end = -1; @@ -708,7 +709,7 @@ public class MatchQueryParser { SpanQuery q = createSpanQuery(ts, field, usePrefix); if (q != null) { if (queries.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queries.add(q); } @@ -722,14 +723,14 @@ public class MatchQueryParser { Term[] terms = graph.getTerms(field, start); assert terms.length > 0; if (terms.length >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } queryPos = newSpanQuery(terms, usePrefix); } if (queryPos != null) { if (clauses.size() >= maxClauseCount) { - throw new BooleanQuery.TooManyClauses(); + throw new IndexSearcher.TooManyClauses(); } clauses.add(queryPos); } diff --git a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java 
b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java index 52122ed86ef6..446d78078e64 100644 --- a/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/MultiMatchQueryParser.java @@ -200,7 +200,7 @@ public class MultiMatchQueryParser extends MatchQueryParser { protected Query newSynonymQuery(String field, TermAndBoost[] terms) { BytesRef[] values = new BytesRef[terms.length]; for (int i = 0; i < terms.length; i++) { - values[i] = terms[i].term; + values[i] = terms[i].term(); } return blendTerms(context, values, tieBreaker, lenient, blendedFields); } diff --git a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java index db0077284bbd..96e8ac35c8e3 100644 --- a/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java +++ b/server/src/main/java/org/elasticsearch/index/search/NestedHelper.java @@ -24,8 +24,6 @@ import org.apache.lucene.search.TermQuery; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NestedObjectMapper; -import java.io.IOException; -import java.io.UncheckedIOException; import java.util.function.Predicate; /** Utility class to filter parent and children clauses when building nested @@ -55,15 +53,10 @@ public final class NestedHelper { // cover a high majority of use-cases return mightMatchNestedDocs(((TermQuery) query).getTerm().field()); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNestedDocs(tis.getField()); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + return mightMatchNestedDocs(tis.getField()); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNestedDocs(((PointRangeQuery) query).getField()); @@ -75,13 +68,13 @@ public final class NestedHelper { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(this::mightMatchNestedDocs); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(this::mightMatchNestedDocs); } } else if (query instanceof ESToParentBlockJoinQuery) { @@ -122,15 +115,10 @@ public final class NestedHelper { } else if (query instanceof TermQuery) { return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath); } else if (query instanceof TermInSetQuery tis) { - try { - if (tis.getTermsCount() > 0) { - return mightMatchNonNestedDocs(tis.getField(), nestedPath); - } else { - return false; - } - } catch (IOException e) { - // this handling isn't needed any more once we move to Lucene 10 - throw new UncheckedIOException("We are not doing IO here, this should never happen.", e); + if (tis.getTermsCount() > 0) { + return mightMatchNonNestedDocs(tis.getField(), nestedPath); + } else { + return false; } } else if (query instanceof PointRangeQuery) { return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath); @@ -142,13 +130,13 @@ public final class NestedHelper { return bq.clauses() .stream() .filter(BooleanClause::isRequired) - 
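Several hunks in this area move from getter methods and public fields to the record-style accessors Lucene 10 exposes on TotalHits, BooleanClause and TermAndBoost. A small sketch, assuming a TopDocs topDocs and a BooleanQuery booleanQuery are in scope:

    long hits = topDocs.totalHits.value();                  // was the public field .value
    boolean lowerBound = topDocs.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
    for (BooleanClause clause : booleanQuery.clauses()) {
        if (clause.occur() == BooleanClause.Occur.MUST) {   // was clause.getOccur()
            Query subQuery = clause.query();                // was clause.getQuery()
        }
    }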
.map(BooleanClause::getQuery) + .map(BooleanClause::query) .allMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } else { return bq.clauses() .stream() - .filter(c -> c.getOccur() == Occur.SHOULD) - .map(BooleanClause::getQuery) + .filter(c -> c.occur() == Occur.SHOULD) + .map(BooleanClause::query) .anyMatch(q -> mightMatchNonNestedDocs(q, nestedPath)); } } else { @@ -183,5 +171,4 @@ public final class NestedHelper { } return true; } - } diff --git a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java index 76dba6068942..d237a0333533 100644 --- a/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java +++ b/server/src/main/java/org/elasticsearch/index/search/QueryStringQueryParser.java @@ -760,7 +760,14 @@ public class QueryStringQueryParser extends QueryParser { setAnalyzer(forceAnalyzer); return super.getRegexpQuery(field, termStr); } - return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(), getMultiTermRewriteMethod(), context); + return currentFieldType.regexpQuery( + termStr, + RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT, + 0, + getDeterminizeWorkLimit(), + getMultiTermRewriteMethod(), + context + ); } catch (RuntimeException e) { if (lenient) { return newLenientFieldQuery(field, e); diff --git a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java index 562bf1e75dc1..97d1b3342ca2 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/index/shard/RemoveCorruptedLuceneSegmentsAction.java @@ -33,7 +33,7 @@ public class RemoveCorruptedLuceneSegmentsAction { final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); @@ -64,7 +64,7 @@ public class RemoveCorruptedLuceneSegmentsAction { final CheckIndex.Status status; try (CheckIndex checker = new CheckIndex(indexDirectory, writeLock)) { - checker.setChecksumsOnly(true); + checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); checker.setInfoStream(printStream, verbose); status = checker.checkIndex(null); diff --git a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java index f1291ac6faa5..94a29258f320 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java +++ b/server/src/main/java/org/elasticsearch/index/shard/ShardSplittingQuery.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.join.BitSetProducer; @@ -73,7 +74,7 @@ final class ShardSplittingQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { LeafReader leafReader = context.reader(); FixedBitSet 
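The RemoveCorruptedLuceneSegmentsAction hunks replace setChecksumsOnly(true) with the Lucene 10 CheckIndex.Level API. A sketch of a checksum-only verification helper (names are illustrative):

    static boolean isIndexClean(Directory dir, Lock writeLock, PrintStream out) throws IOException {
        try (CheckIndex checker = new CheckIndex(dir, writeLock)) {
            checker.setLevel(CheckIndex.Level.MIN_LEVEL_FOR_CHECKSUM_CHECKS); // was setChecksumsOnly(true)
            checker.setInfoStream(out, false);
            return checker.checkIndex(null).clean;
        }
    }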
bitSet = new FixedBitSet(leafReader.maxDoc()); Terms terms = leafReader.terms(RoutingFieldMapper.NAME); @@ -82,87 +83,102 @@ final class ShardSplittingQuery extends Query { int targetShardId = indexRouting.getShard(Uid.decodeId(ref.bytes, ref.offset, ref.length), null); return shardId == targetShardId; }; - if (terms == null) { - // this is the common case - no partitioning and no _routing values - // in this case we also don't do anything special with regards to nested docs since we basically delete - // by ID and parent and nested all have the same id. - assert indexMetadata.isRoutingPartitionedIndex() == false; - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); - } else { - final BitSet parentBitSet; - if (nestedParentBitSetProducer == null) { - parentBitSet = null; - } else { - parentBitSet = nestedParentBitSetProducer.getBitSet(context); - if (parentBitSet == null) { - return null; // no matches - } - } - if (indexMetadata.isRoutingPartitionedIndex()) { - // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing - // this index is routing partitioned. - Visitor visitor = new Visitor(leafReader); - TwoPhaseIterator twoPhaseIterator = parentBitSet == null - ? new RoutingPartitionedDocIdSetIterator(visitor) - : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); - return new ConstantScoreScorer(this, score(), scoreMode, twoPhaseIterator); - } else { - // here we potentially guard the docID consumers with our parent bitset if we have one. - // this ensures that we are only marking root documents in the nested case and if necessary - // we do a second pass to mark the corresponding children in markChildDocs - Function maybeWrapConsumer = consumer -> { - if (parentBitSet != null) { - return docId -> { - if (parentBitSet.get(docId)) { - consumer.accept(docId); - } - }; - } - return consumer; - }; - // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to delete - findSplitDocs(RoutingFieldMapper.NAME, ref -> { - int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); - return shardId == targetShardId; - }, leafReader, maybeWrapConsumer.apply(bitSet::set)); - // TODO have the IndexRouting build the query and pass routingRequired in - boolean routingRequired = indexMetadata.mapping() == null ? false : indexMetadata.mapping().routingRequired(); - // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the ones - // with a routing value from the next iteration and delete / select based on the ID. - if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { - /* - * This is a special case where some docs don't have routing values. - * It's annoying, but it's allowed to build an index where some documents - * hve routing and others don't. - * - * Luckily, if the routing field is required in the mapping then we can - * safely assume that all documents which are don't have a routing are - * nested documents. And we pick those up later based on the assignment - * of the document that contains them. 
- */ - FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); - findSplitDocs( - RoutingFieldMapper.NAME, - Predicates.never(), - leafReader, - maybeWrapConsumer.apply(hasRoutingValue::set) - ); - IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); - findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { - if (hasRoutingValue.get(docId) == false) { - bitSetConsumer.accept(docId); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + if (terms == null) { + // this is the common case - no partitioning and no _routing values + // in this case we also don't do anything special with regards to nested docs since we basically delete + // by ID and parent and nested all have the same id. + assert indexMetadata.isRoutingPartitionedIndex() == false; + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, bitSet::set); + } else { + final BitSet parentBitSet; + if (nestedParentBitSetProducer == null) { + parentBitSet = null; + } else { + parentBitSet = nestedParentBitSetProducer.getBitSet(context); + if (parentBitSet == null) { + return null; // no matches } - }); - } - } - if (parentBitSet != null) { - // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. - markChildDocs(parentBitSet, bitSet); - } - } + } + if (indexMetadata.isRoutingPartitionedIndex()) { + // this is the heaviest invariant. Here we have to visit all docs stored fields do extract _id and _routing + // this index is routing partitioned. + Visitor visitor = new Visitor(leafReader); + TwoPhaseIterator twoPhaseIterator = parentBitSet == null + ? new RoutingPartitionedDocIdSetIterator(visitor) + : new NestedRoutingPartitionedDocIdSetIterator(visitor, parentBitSet); + return new ConstantScoreScorer(score(), scoreMode, twoPhaseIterator); + } else { + // here we potentially guard the docID consumers with our parent bitset if we have one. + // this ensures that we are only marking root documents in the nested case and if necessary + // we do a second pass to mark the corresponding children in markChildDocs + Function maybeWrapConsumer = consumer -> { + if (parentBitSet != null) { + return docId -> { + if (parentBitSet.get(docId)) { + consumer.accept(docId); + } + }; + } + return consumer; + }; + // in the _routing case we first go and find all docs that have a routing value and mark the ones we have to + // delete + findSplitDocs(RoutingFieldMapper.NAME, ref -> { + int targetShardId = indexRouting.getShard(null, ref.utf8ToString()); + return shardId == targetShardId; + }, leafReader, maybeWrapConsumer.apply(bitSet::set)); - return new ConstantScoreScorer(this, score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + // TODO have the IndexRouting build the query and pass routingRequired in + boolean routingRequired = indexMetadata.mapping() == null + ? false + : indexMetadata.mapping().routingRequired(); + // now if we have a mixed index where some docs have a _routing value and some don't we have to exclude the + // ones + // with a routing value from the next iteration and delete / select based on the ID. + if (routingRequired == false && terms.getDocCount() != leafReader.maxDoc()) { + /* + * This is a special case where some docs don't have routing values. + * It's annoying, but it's allowed to build an index where some documents + * hve routing and others don't. 
+ * + * Luckily, if the routing field is required in the mapping then we can + * safely assume that all documents which are don't have a routing are + * nested documents. And we pick those up later based on the assignment + * of the document that contains them. + */ + FixedBitSet hasRoutingValue = new FixedBitSet(leafReader.maxDoc()); + findSplitDocs( + RoutingFieldMapper.NAME, + Predicates.never(), + leafReader, + maybeWrapConsumer.apply(hasRoutingValue::set) + ); + IntConsumer bitSetConsumer = maybeWrapConsumer.apply(bitSet::set); + findSplitDocs(IdFieldMapper.NAME, includeInShard, leafReader, docId -> { + if (hasRoutingValue.get(docId) == false) { + bitSetConsumer.accept(docId); + } + }); + } + } + if (parentBitSet != null) { + // if nested docs are involved we also need to mark all child docs that belong to a matching parent doc. + markChildDocs(parentBitSet, bitSet); + } + } + + return new ConstantScoreScorer(score(), scoreMode, new BitSetIterator(bitSet, bitSet.length())); + } + + @Override + public long cost() { + return leafReader.maxDoc(); + } + }; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java index 3dc5953e3d3d..bc94db13074d 100644 --- a/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java +++ b/server/src/main/java/org/elasticsearch/index/store/FsDirectoryFactory.java @@ -33,6 +33,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.HashSet; import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { @@ -67,12 +68,12 @@ public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { // Use Lucene defaults final FSDirectory primaryDirectory = FSDirectory.open(location, lockFactory); if (primaryDirectory instanceof MMapDirectory mMapDirectory) { - return new HybridDirectory(lockFactory, setPreload(mMapDirectory, lockFactory, preLoadExtensions)); + return new HybridDirectory(lockFactory, setPreload(mMapDirectory, preLoadExtensions)); } else { return primaryDirectory; } case MMAPFS: - return setPreload(new MMapDirectory(location, lockFactory), lockFactory, preLoadExtensions); + return setPreload(new MMapDirectory(location, lockFactory), preLoadExtensions); case SIMPLEFS: case NIOFS: return new NIOFSDirectory(location, lockFactory); @@ -81,17 +82,23 @@ public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { } } - public static MMapDirectory setPreload(MMapDirectory mMapDirectory, LockFactory lockFactory, Set preLoadExtensions) - throws IOException { - assert mMapDirectory.getPreload() == false; + /** Sets the preload, if any, on the given directory based on the extensions. Returns the same directory instance. */ + // visibility and extensibility for testing + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + mMapDirectory.setPreload(getPreloadFunc(preLoadExtensions)); + return mMapDirectory; + } + + /** Gets a preload function based on the given preLoadExtensions. 
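The ShardSplittingQuery and ScriptQueryBuilder hunks illustrate the Lucene 10 Weight contract: custom weights implement scorerSupplier(...) instead of scorer(...), and ConstantScoreScorer no longer takes the Weight. A fragment sketching the simple case, assuming it sits inside a ConstantScoreWeight with scoreMode in scope, as in the hunks above:

    @Override
    public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
        DocIdSetIterator iterator = DocIdSetIterator.all(context.reader().maxDoc());
        Scorer scorer = new ConstantScoreScorer(score(), scoreMode, iterator); // no Weight argument anymore
        return new DefaultScorerSupplier(scorer);
    }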
*/ + static BiPredicate getPreloadFunc(Set preLoadExtensions) { if (preLoadExtensions.isEmpty() == false) { if (preLoadExtensions.contains("*")) { - mMapDirectory.setPreload(true); + return MMapDirectory.ALL_FILES; } else { - return new PreLoadMMapDirectory(mMapDirectory, lockFactory, preLoadExtensions); + return (name, context) -> preLoadExtensions.contains(FileSwitchDirectory.getExtension(name)); } } - return mMapDirectory; + return MMapDirectory.NO_FILES; } /** @@ -116,6 +123,8 @@ public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { // we need to do these checks on the outer directory since the inner doesn't know about pending deletes ensureOpen(); ensureCanRead(name); + // we switch the context here since mmap checks for the READONCE context by identity + context = context == Store.READONCE_CHECKSUM ? IOContext.READONCE : context; // we only use the mmap to open inputs. Everything else is managed by the NIOFSDirectory otherwise // we might run into trouble with files that are pendingDelete in one directory but still // listed in listAll() from the other. We on the other hand don't want to list files from both dirs @@ -162,50 +171,4 @@ public class FsDirectoryFactory implements IndexStorePlugin.DirectoryFactory { return delegate; } } - - // TODO it would be nice to share code between PreLoadMMapDirectory and HybridDirectory but due to the nesting aspect of - // directories here makes it tricky. It would be nice to allow MMAPDirectory to pre-load on a per IndexInput basis. - static final class PreLoadMMapDirectory extends MMapDirectory { - private final MMapDirectory delegate; - private final Set preloadExtensions; - - PreLoadMMapDirectory(MMapDirectory delegate, LockFactory lockFactory, Set preload) throws IOException { - super(delegate.getDirectory(), lockFactory); - super.setPreload(false); - this.delegate = delegate; - this.delegate.setPreload(true); - this.preloadExtensions = preload; - assert getPreload() == false; - } - - @Override - public void setPreload(boolean preload) { - throw new IllegalArgumentException("can't set preload on a preload-wrapper"); - } - - @Override - public IndexInput openInput(String name, IOContext context) throws IOException { - if (useDelegate(name)) { - // we need to do these checks on the outer directory since the inner doesn't know about pending deletes - ensureOpen(); - ensureCanRead(name); - return delegate.openInput(name, context); - } - return super.openInput(name, context); - } - - @Override - public synchronized void close() throws IOException { - IOUtils.close(super::close, delegate); - } - - boolean useDelegate(String name) { - final String extension = FileSwitchDirectory.getExtension(name); - return preloadExtensions.contains(extension); - } - - MMapDirectory getDelegate() { - return delegate; - } - } } diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index a1038356735f..c3d21b23d6a4 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -33,6 +33,7 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.Lock; import org.apache.lucene.store.NIOFSDirectory; +import org.apache.lucene.store.ReadAdvice; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Version; @@ -147,7 +148,15 @@ public class Store extends 
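The FsDirectoryFactory hunks adapt to Lucene 10's per-file preload control on MMapDirectory, which replaces the old boolean setter and makes the PreLoadMMapDirectory wrapper unnecessary. A sketch, assuming a Path path and an illustrative set of extensions:

    MMapDirectory dir = new MMapDirectory(path);
    Set<String> preloadExtensions = Set.of("dvd", "tim");
    dir.setPreload((name, context) -> preloadExtensions.contains(FileSwitchDirectory.getExtension(name)));
    // MMapDirectory.ALL_FILES and MMapDirectory.NO_FILES cover the old setPreload(true/false) behaviour.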
AbstractIndexShardComponent implements Closeable, Ref * Specific {@link IOContext} indicating that we will read only the Lucene file footer (containing the file checksum) * See {@link MetadataSnapshot#checksumFromLuceneFile}. */ - public static final IOContext READONCE_CHECKSUM = new IOContext(IOContext.READONCE, true); + public static final IOContext READONCE_CHECKSUM = createReadOnceContext(); + + // while equivalent, these different read once contexts are checked by identity in directory implementations + private static IOContext createReadOnceContext() { + var context = IOContext.READONCE.withReadAdvice(ReadAdvice.SEQUENTIAL); + assert context != IOContext.READONCE; + assert context.equals(IOContext.READONCE); + return context; + } private final AtomicBoolean isClosed = new AtomicBoolean(false); private final StoreDirectory directory; @@ -632,7 +641,7 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref List ex = new ArrayList<>(); for (String file : files) { if (file.startsWith(CORRUPTED_MARKER_NAME_PREFIX)) { - try (ChecksumIndexInput input = directory.openChecksumInput(file, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(file)) { CodecUtil.checkHeader(input, CODEC, CORRUPTED_MARKER_CODEC_VERSION, CORRUPTED_MARKER_CODEC_VERSION); final int size = input.readVInt(); final byte[] buffer = new byte[size]; @@ -919,7 +928,10 @@ public class Store extends AbstractIndexShardComponent implements Closeable, Ref boolean readFileAsHash, BytesRef writerUuid ) throws IOException { - try (IndexInput in = directory.openInput(file, READONCE_CHECKSUM)) { + // We select the read once context carefully here since these constants, while equivalent are + // checked by identity in the different directory implementations. + var context = file.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : READONCE_CHECKSUM; + try (IndexInput in = directory.openInput(file, context)) { final long length = in.length(); if (length < CodecUtil.footerLength()) { // If the file isn't long enough to contain the footer then verifying it triggers an IAE, but really it's corrupted diff --git a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java index 2be9d0f224e2..501c2496aacb 100644 --- a/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java +++ b/server/src/main/java/org/elasticsearch/index/store/StoreFileMetadata.java @@ -195,7 +195,7 @@ public class StoreFileMetadata implements Writeable { * * This ID may be {@link StoreFileMetadata#UNAVAILABLE_WRITER_UUID} (i.e. zero-length) if unavailable, e.g.: * - * - The file was written by a version of Lucene prior to {@link org.apache.lucene.util.Version#LUCENE_8_6_0}. + * - The file was written by a version of Lucene prior to 8.6.0. * - The metadata came from a version of Elasticsearch prior to {@link StoreFileMetadata#WRITER_UUID_MIN_VERSION}). * - The file is not one of the files listed above. 
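The Store hunk above derives its read-once checksum context through the new IOContext/ReadAdvice API and relies on the derived context being equal to, but not the same instance as, IOContext.READONCE. A minimal sketch mirroring that construction:

    IOContext checksumContext = IOContext.READONCE.withReadAdvice(ReadAdvice.SEQUENTIAL);
    assert checksumContext != IOContext.READONCE;       // distinct instance, checked by identity in mmap code
    assert checksumContext.equals(IOContext.READONCE);  // but equal by value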
* diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 763abb41797b..db84be817bbd 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -96,7 +96,7 @@ public class TermVectorsService { /* or from an existing document */ else if (docIdAndVersion != null) { // fields with stored term vectors - termVectorsByField = docIdAndVersion.reader.getTermVectors(docIdAndVersion.docId); + termVectorsByField = docIdAndVersion.reader.termVectors().get(docIdAndVersion.docId); Set selectedFields = request.selectedFields(); // generate tvs for fields where analyzer is overridden if (selectedFields == null && request.perFieldAnalyzer() != null) { @@ -301,7 +301,7 @@ public class TermVectorsService { } } /* and read vectors from it */ - return index.createSearcher().getIndexReader().getTermVectors(0); + return index.createSearcher().getIndexReader().termVectors().get(0); } private static Fields generateTermVectorsFromDoc(IndexShard indexShard, TermVectorsRequest request) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java index 29161814e772..525da1670f90 100644 --- a/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/AssociatedIndexDescriptor.java @@ -94,7 +94,7 @@ public class AssociatedIndexDescriptor implements IndexPatternMatcher { String output = pattern; output = output.replace(".", "\\."); output = output.replace("*", ".*"); - return new RegExp(output).toAutomaton(); + return new RegExp(output, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index abba6ec6ae68..9bca59e9e4d6 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -12,13 +12,11 @@ package org.elasticsearch.indices; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.LRUQueryCache; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryCache; import org.apache.lucene.search.QueryCachingPolicy; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.ShardCoreKeyMap; @@ -173,24 +171,12 @@ public class IndicesQueryCache implements QueryCache, Closeable { return in.count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); return in.scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - shardKeyMap.add(context.reader()); - return in.bulkScorer(context); - } - @Override public boolean 
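The TermVectorsService hunk above uses the Lucene 10 per-reader accessors: IndexReader#getTermVectors(int) and the per-document stored-field methods are gone in favour of termVectors() and storedFields(). A sketch of the replacement calls (method name is illustrative):

    static void readDoc(IndexReader reader, int docID) throws IOException {
        Document stored = reader.storedFields().document(docID); // replaces reader.document(docID)
        Fields vectors = reader.termVectors().get(docID);         // replaces reader.getTermVectors(docID)
        // ... use stored fields and term vectors ...
    }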
isCacheable(LeafReaderContext ctx) { return in.isCacheable(ctx); diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java index 08148de0591c..f3456870114f 100644 --- a/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java +++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexDescriptor.java @@ -41,6 +41,8 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; + /** * Uses a pattern string to define a protected space for indices belonging to a system feature, and, if needed, provides metadata for * managing indices that match the pattern. @@ -360,7 +362,7 @@ public class SystemIndexDescriptor implements IndexPatternMatcher, Comparable new CharacterRunAutomaton( - MinimizationOperations.minimize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ) + entry -> new CharacterRunAutomaton(Operations.determinize(entry.getValue(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)) ) ); } @@ -426,7 +423,7 @@ public class SystemIndices { .stream() .map(SystemIndices::featureToIndexAutomaton) .reduce(Operations::union); - return MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } private static CharacterRunAutomaton buildNetNewIndexCharacterRunAutomaton(Map featureDescriptors) { @@ -437,9 +434,7 @@ public class SystemIndices { .filter(SystemIndexDescriptor::isNetNew) .map(descriptor -> SystemIndexDescriptor.buildAutomaton(descriptor.getIndexPattern(), descriptor.getAliasName())) .reduce(Operations::union); - return new CharacterRunAutomaton( - MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) - ); + return new CharacterRunAutomaton(Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)); } private static Automaton featureToIndexAutomaton(Feature feature) { @@ -459,7 +454,7 @@ public class SystemIndices { .map(dsName -> SystemIndexDescriptor.buildAutomaton(dsName, null)) .reduce(Operations::union); - return automaton.isPresent() ? MinimizationOperations.minimize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; + return automaton.isPresent() ? 
Operations.determinize(automaton.get(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) : EMPTY; } private static Predicate buildDataStreamNamePredicate(Map featureDescriptors) { @@ -472,7 +467,7 @@ public class SystemIndices { .stream() .map(SystemIndices::featureToDataStreamBackingIndicesAutomaton) .reduce(Operations::union); - return MinimizationOperations.minimize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return Operations.determinize(automaton.orElse(EMPTY), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } private static Automaton featureToDataStreamBackingIndicesAutomaton(Feature feature) { diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 556bf43425bd..fbfed8a75a14 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -1376,7 +1376,7 @@ public class RecoverySourceHandler { // we already have the file contents on heap no need to open the file again currentInput = null; } else { - currentInput = store.directory().openInput(md.name(), IOContext.READ); + currentInput = store.directory().openInput(md.name(), IOContext.DEFAULT); } } diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java index 49041566a857..fe076c7155c8 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/GroupingDocValuesSelector.java @@ -199,12 +199,12 @@ abstract class GroupingDocValuesSelector extends GroupSelector { @Override public boolean advanceExact(int target) throws IOException { if (sorted.advanceExact(target)) { - ord = (int) sorted.nextOrd(); - if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (sorted.docValueCount() > 1) { throw new IllegalStateException( "failed to extract doc:" + target + ", the grouping field must be single valued" ); } + ord = (int) sorted.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java index ed07525c1dd7..443963dd59dc 100644 --- a/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java +++ b/server/src/main/java/org/elasticsearch/lucene/grouping/TopFieldGroups.java @@ -170,10 +170,10 @@ public final class TopFieldGroups extends TopFieldDocs { final TopFieldGroups shard = shardHits[shardIDX]; // totalHits can be non-zero even if no hits were // collected, when searchAfter was used: - totalHitCount += shard.totalHits.value; + totalHitCount += shard.totalHits.value(); // If any hit count is a lower bound then the merged // total hit count is a lower bound as well - if (shard.totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { + if (shard.totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) { totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO; } if (CollectionUtils.isEmpty(shard.scoreDocs) == false) { diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java index dca4ff503c78..67ece200c06e 100644 --- 
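The RecoverySourceHandler change above reflects the removal of IOContext.READ in Lucene 10: ordinary reads now go through IOContext.DEFAULT, with IOContext.READONCE still available for single-pass sequential reads. A trivial sketch with hypothetical names:

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;

import java.io.IOException;

// General-purpose read of an index file; READONCE would be the choice for a one-shot scan.
static IndexInput openForRead(Directory directory, String fileName) throws IOException {
    return directory.openInput(fileName, IOContext.DEFAULT);
}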
a/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BinaryDocValuesRangeQuery.java @@ -18,7 +18,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -61,7 +61,7 @@ public final class BinaryDocValuesRangeQuery extends Query { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final BinaryDocValues values = context.reader().getBinaryDocValues(fieldName); if (values == null) { return null; @@ -106,7 +106,8 @@ public final class BinaryDocValuesRangeQuery extends Query { return 4; // at most 4 comparisons } }; - return new ConstantScoreScorer(this, score(), scoreMode, iterator); + + return new DefaultScorerSupplier(new ConstantScoreScorer(score(), scoreMode, iterator)); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java index c75c6e2373f2..788bf76087d1 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/BlendedTermQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.IOSupplier; import org.apache.lucene.util.InPlaceMergeSorter; import java.io.IOException; @@ -188,7 +189,11 @@ public abstract class BlendedTermQuery extends Query { int df = termContext.docFreq(); long ttf = sumTTF; for (int i = 0; i < len; i++) { - TermState termState = termContext.get(leaves.get(i)); + IOSupplier termStateSupplier = termContext.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } @@ -212,7 +217,11 @@ public abstract class BlendedTermQuery extends Query { } TermStates newCtx = new TermStates(readerContext); for (int i = 0; i < len; ++i) { - TermState termState = ctx.get(leaves.get(i)); + IOSupplier termStateSupplier = ctx.get(leaves.get(i)); + if (termStateSupplier == null) { + continue; + } + TermState termState = termStateSupplier.get(); if (termState == null) { continue; } diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java index 8e85c1d97438..13b0bf650a39 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/MinDocQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,17 @@ public final class MinDocQuery extends Query { throw new 
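The BlendedTermQuery hunks above adapt to TermStates#get now returning a lazy IOSupplier<TermState> per leaf, where both the supplier and the supplied state may be null. A condensed sketch of that double null check (the helper name is illustrative):

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.TermState;
import org.apache.lucene.index.TermStates;
import org.apache.lucene.util.IOSupplier;

import java.io.IOException;

// A null supplier or a null supplied state both mean the term is absent from this leaf.
static TermState leafTermState(TermStates states, LeafReaderContext leaf) throws IOException {
    IOSupplier<TermState> supplier = states.get(leaf);
    return supplier == null ? null : supplier.get();
}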
IllegalStateException("Executing against a different reader than the query has been rewritten against"); } return new ConstantScoreWeight(this, boost) { + @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); if (context.docBase + maxDoc <= minDoc) { return null; } final int segmentMinDoc = Math.max(0, minDoc - context.docBase); final DocIdSetIterator disi = new MinDocIterator(segmentMinDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java index 1a8ac203f0cb..6575f7f416bd 100644 --- a/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/queries/SearchAfterSortedDocQuery.java @@ -22,6 +22,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; import org.apache.lucene.search.Weight; @@ -67,8 +68,8 @@ public class SearchAfterSortedDocQuery extends Query { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Sort segmentSort = context.reader().getMetaData().getSort(); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Sort segmentSort = context.reader().getMetaData().sort(); if (segmentSort == null || Lucene.canEarlyTerminate(sort, segmentSort) == false) { throw new IOException("search sort :[" + sort + "] does not match the index sort:[" + segmentSort + "]"); } @@ -80,7 +81,8 @@ public class SearchAfterSortedDocQuery extends Query { return null; } final DocIdSetIterator disi = new MinDocQuery.MinDocIterator(firstDoc, maxDoc); - return new ConstantScoreScorer(this, score(), scoreMode, disi); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, disi); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java index bd64ee88fc30..064f8ef3eacd 100644 --- a/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java +++ b/server/src/main/java/org/elasticsearch/lucene/spatial/ShapeDocValuesQuery.java @@ -109,14 +109,8 @@ abstract class ShapeDocValuesQuery extends Query { final Component2D component2D = create(geometries); return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -125,7 +119,7 @@ abstract class ShapeDocValuesQuery 
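MinDocQuery and SearchAfterSortedDocQuery above show the central Weight change in Lucene 10: scorer(LeafReaderContext) is no longer an extension point, so weights implement scorerSupplier() and, when the Scorer is cheap to build, wrap it in Weight.DefaultScorerSupplier. A self-contained sketch of that shape, a hypothetical match-all constant-score weight rather than code from this PR:

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;

import java.io.IOException;

// The weight only overrides scorerSupplier(); Weight#scorer is derived from it by Lucene.
static Weight matchAllConstantScoreWeight(Query query, float boost, ScoreMode scoreMode) {
    return new ConstantScoreWeight(query, boost) {
        @Override
        public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
            DocIdSetIterator all = DocIdSetIterator.all(context.reader().maxDoc());
            Scorer scorer = new ConstantScoreScorer(score(), scoreMode, all);
            return new DefaultScorerSupplier(scorer); // cheap scorer, so the default supplier suffices
        }

        @Override
        public boolean isCacheable(LeafReaderContext ctx) {
            return true;
        }
    };
}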
extends Query { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final GeometryDocValueReader reader = new GeometryDocValueReader(); final Component2DVisitor visitor = Component2DVisitor.getVisitor(component2D, relation, encoder); @@ -143,7 +137,7 @@ abstract class ShapeDocValuesQuery extends Query { return 1000f; // TODO: what should it be? } }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override @@ -167,14 +161,8 @@ abstract class ShapeDocValuesQuery extends Query { } return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return scorerSupplier(context).get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - final Weight weight = this; // implement ScorerSupplier, since we do some expensive stuff to make a scorer return new ScorerSupplier() { @@ -183,7 +171,7 @@ abstract class ShapeDocValuesQuery extends Query { // binary doc values allocate an array upfront, lets only allocate it if we are going to use it final BinaryDocValues values = context.reader().getBinaryDocValues(field); if (values == null) { - return new ConstantScoreScorer(weight, 0f, scoreMode, DocIdSetIterator.empty()); + return new ConstantScoreScorer(0f, scoreMode, DocIdSetIterator.empty()); } final Component2DVisitor[] visitors = new Component2DVisitor[components2D.size()]; for (int i = 0; i < components2D.size(); i++) { @@ -210,7 +198,7 @@ abstract class ShapeDocValuesQuery extends Query { return 1000f; // TODO: what should it be? 
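ShapeDocValuesQuery above also illustrates that ConstantScoreScorer, like other scorers in Lucene 10, no longer takes its Weight as a constructor argument, so a hand-rolled ScorerSupplier no longer needs to capture the enclosing weight. A minimal sketch of such a supplier with deferred setup (the names and the stand-in iterator are placeholders):

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;

import java.io.IOException;

static ScorerSupplier lazySupplier(LeafReaderContext context, ScoreMode scoreMode, float score) {
    return new ScorerSupplier() {
        @Override
        public Scorer get(long leadCost) throws IOException {
            // stand-in for expensive per-segment setup that should only run if the scorer is used
            DocIdSetIterator disi = DocIdSetIterator.all(context.reader().maxDoc());
            return new ConstantScoreScorer(score, scoreMode, disi);
        }

        @Override
        public long cost() {
            return context.reader().maxDoc();
        }
    };
}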
} }; - return new ConstantScoreScorer(weight, score(), scoreMode, iterator); + return new ConstantScoreScorer(score(), scoreMode, iterator); } @Override diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 9b33ac2ea12f..8e6648632957 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -415,8 +415,8 @@ class NodeConstruction { Constants.OS_ARCH, Constants.JVM_VENDOR, Constants.JVM_NAME, - Constants.JAVA_VERSION, - Constants.JVM_VERSION + System.getProperty("java.version"), + Runtime.version().toString() ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 5df7a8ea20f5..435bf71e3b2c 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -4029,7 +4029,7 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp final String file = fileInfo.physicalName(); try ( Releasable ignored = context.withCommitRef(); - IndexInput indexInput = store.openVerifyingInput(file, IOContext.READ, fileInfo.metadata()) + IndexInput indexInput = store.openVerifyingInput(file, IOContext.DEFAULT, fileInfo.metadata()) ) { for (int i = 0; i < fileInfo.numberOfParts(); i++) { final long partBytes = fileInfo.partBytes(i); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index a457ebb67fd4..09f31abb58eb 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -72,7 +72,7 @@ public class RestCountAction extends AbstractCatAction { return channel -> client.search(countRequest, new RestResponseListener(channel) { @Override public RestResponse buildResponse(SearchResponse countResponse) throws Exception { - assert countResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert countResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; return RestTable.buildResponse(buildTable(request, countResponse), channel); } }); @@ -90,7 +90,7 @@ public class RestCountAction extends AbstractCatAction { private Table buildTable(RestRequest request, SearchResponse response) { Table table = getTableWithHeader(request); table.startRow(); - table.addCell(response.getHits().getTotalHits().value); + table.addCell(response.getHits().getTotalHits().value()); table.endRow(); return table; diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java index 23da666a39a7..c1a55874bfc5 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCountAction.java @@ -87,7 +87,7 @@ public class RestCountAction extends BaseRestHandler { if (terminateAfter != 
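The RestCountAction changes above are part of a mechanical migration: TotalHits now exposes its count and relation through value() and relation() accessors instead of public fields. For example (hypothetical helper):

import org.apache.lucene.search.TotalHits;

// EQUAL_TO means the count is exact; GREATER_THAN_OR_EQUAL_TO means it is a lower bound.
static String describe(TotalHits totalHits) {
    long count = totalHits.value();
    boolean exact = totalHits.relation() == TotalHits.Relation.EQUAL_TO;
    return (exact ? "exactly " : "at least ") + count + " hits";
}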
DEFAULT_TERMINATE_AFTER) { builder.field("terminated_early", response.isTerminatedEarly()); } - builder.field("count", response.getHits().getTotalHits().value); + builder.field("count", response.getHits().getTotalHits().value()); buildBroadcastShardsHeader( builder, request, diff --git a/server/src/main/java/org/elasticsearch/script/ScoreScript.java b/server/src/main/java/org/elasticsearch/script/ScoreScript.java index c8129717b5cc..6c7d36ee9a43 100644 --- a/server/src/main/java/org/elasticsearch/script/ScoreScript.java +++ b/server/src/main/java/org/elasticsearch/script/ScoreScript.java @@ -116,6 +116,11 @@ public abstract class ScoreScript extends DocBasedScript { this.docId = docid; } + /** Get the current document. */ + public int docId() { + return docId; + } + public void setScorer(Scorable scorer) { this.scoreSupplier = () -> { try { diff --git a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java index c80a2e7200ec..d83530e82b16 100644 --- a/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java +++ b/server/src/main/java/org/elasticsearch/script/SortedSetDocValuesStringFieldScript.java @@ -46,9 +46,8 @@ public class SortedSetDocValuesStringFieldScript extends StringFieldScript { public void execute() { try { if (hasValue) { - long ord; - while ((ord = sortedSetDocValues.nextOrd()) != SortedSetDocValues.NO_MORE_ORDS) { - BytesRef bytesRef = sortedSetDocValues.lookupOrd(ord); + for (int i = 0; i < sortedSetDocValues.docValueCount(); i++) { + BytesRef bytesRef = sortedSetDocValues.lookupOrd(sortedSetDocValues.nextOrd()); emit(bytesRef.utf8ToString()); } } diff --git a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java index 6297fbaa2318..d9550dd17a05 100644 --- a/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/IpDocValuesField.java @@ -157,7 +157,8 @@ public class IpDocValuesField extends AbstractScriptFieldFactory public void setNextDocId(int docId) throws IOException { count = 0; if (in.advanceExact(docId)) { - for (long ord = in.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + for (int i = 0; i < in.docValueCount(); i++) { + long ord = in.nextOrd(); ords = ArrayUtil.grow(ords, count + 1); ords[count++] = ord; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java index fd7c5227e22a..be1b972dcd41 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/ByteKnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.ByteVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; import org.elasticsearch.index.mapper.vectors.DenseVectorScriptDocValues; @@ -19,7 +20,8 @@ import java.io.IOException; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected 
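The script and doc-values field changes above show the new SortedSetDocValues iteration contract: SortedSetDocValues.NO_MORE_ORDS is gone, and after advanceExact a consumer reads docValueCount() ordinals by calling nextOrd() exactly that many times. A small self-contained sketch (the helper name is illustrative):

import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Collect the terms of one document; nextOrd() must be called docValueCount() times, no more.
static List<String> termsForDoc(SortedSetDocValues dv, int docId) throws IOException {
    List<String> terms = new ArrayList<>();
    if (dv.advanceExact(docId)) {
        int count = dv.docValueCount();
        for (int i = 0; i < count; i++) {
            BytesRef term = dv.lookupOrd(dv.nextOrd());
            terms.add(term.utf8ToString());
        }
    }
    return terms;
}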
ByteVectorValues input; // null if no vectors + protected final ByteVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; // null if no vectors protected byte[] vector; protected final int dims; @@ -31,6 +33,7 @@ public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField super(name, elementType); this.dims = dims; this.input = input; + this.iterator = input == null ? null : input.iterator(); } @Override @@ -38,15 +41,15 @@ public class ByteKnnDenseVectorDocValuesField extends DenseVectorDocValuesField if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java index c7678b03dd8c..3e3809220051 100644 --- a/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java +++ b/server/src/main/java/org/elasticsearch/script/field/vectors/KnnDenseVectorDocValuesField.java @@ -10,6 +10,7 @@ package org.elasticsearch.script.field.vectors; import org.apache.lucene.index.FloatVectorValues; +import org.apache.lucene.index.KnnVectorValues; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.vectors.DenormalizedCosineFloatVectorValues; import org.elasticsearch.index.mapper.vectors.DenseVectorFieldMapper.ElementType; @@ -20,7 +21,8 @@ import java.io.IOException; import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { - protected FloatVectorValues input; // null if no vectors + protected final FloatVectorValues input; // null if no vectors + protected final KnnVectorValues.DocIndexIterator iterator; protected float[] vector; protected final int dims; @@ -28,6 +30,7 @@ public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { super(name, ElementType.FLOAT); this.dims = dims; this.input = input; + this.iterator = input == null ? 
null : input.iterator(); } @Override @@ -35,15 +38,15 @@ public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { if (input == null) { return; } - int currentDoc = input.docID(); + int currentDoc = iterator.docID(); if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { vector = null; } else if (docId == currentDoc) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { - currentDoc = input.advance(docId); + currentDoc = iterator.advance(docId); if (currentDoc == docId) { - vector = input.vectorValue(); + vector = input.vectorValue(iterator.index()); } else { vector = null; } diff --git a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java index 49480816bbbb..5ac25fe0ff69 100644 --- a/server/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/server/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -477,11 +477,11 @@ public enum MultiValueMode implements Writeable { @Override protected int pick(SortedSetDocValues values) throws IOException { - long maxOrd = -1; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { - maxOrd = ord; + int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextOrd(); } - return Math.toIntExact(maxOrd); + return Math.toIntExact(values.nextOrd()); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java index 6a89d66bb341..beac39c2de30 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchFeatures.java +++ b/server/src/main/java/org/elasticsearch/search/SearchFeatures.java @@ -16,8 +16,11 @@ import org.elasticsearch.search.vectors.KnnVectorQueryBuilder; import java.util.Set; public final class SearchFeatures implements FeatureSpecification { + + public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade"); + @Override public Set getFeatures() { - return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED); + return Set.of(KnnVectorQueryBuilder.K_PARAM_SUPPORTED, LUCENE_10_0_0_UPGRADE); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchHits.java b/server/src/main/java/org/elasticsearch/search/SearchHits.java index 8ff5de3c9b8a..896dd7f99994 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHits.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHits.java @@ -288,12 +288,12 @@ public final class SearchHits implements Writeable, ChunkedToXContent, RefCounte return Iterators.concat(Iterators.single((b, p) -> b.startObject(Fields.HITS)), Iterators.single((b, p) -> { boolean totalHitAsInt = params.paramAsBoolean(RestSearchAction.TOTAL_HITS_AS_INT_PARAM, false); if (totalHitAsInt) { - long total = totalHits == null ? -1 : totalHits.value; + long total = totalHits == null ? -1 : totalHits.value(); b.field(Fields.TOTAL, total); } else if (totalHits != null) { b.startObject(Fields.TOTAL); - b.field("value", totalHits.value); - b.field("relation", totalHits.relation == Relation.EQUAL_TO ? "eq" : "gte"); + b.field("value", totalHits.value()); + b.field("relation", totalHits.relation() == Relation.EQUAL_TO ? 
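The dense-vector fields above adapt to Lucene 10 separating vector access from doc iteration: FloatVectorValues and ByteVectorValues are addressed by dense ordinal, and a KnnVectorValues.DocIndexIterator maps doc ids to those ordinals. A compact sketch of a lookup by doc id, assuming the iterator has not already advanced past the target (names are illustrative):

import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.index.KnnVectorValues;

import java.io.IOException;

// Advance the iterator to the doc, then read the vector by the iterator's ordinal.
static float[] vectorForDoc(FloatVectorValues values, int docId) throws IOException {
    KnnVectorValues.DocIndexIterator iterator = values.iterator();
    int current = iterator.docID();
    if (current < docId) {
        current = iterator.advance(docId);
    }
    return current == docId ? values.vectorValue(iterator.index()) : null;
}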
"eq" : "gte"); b.endObject(); } return b; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java index f66f6b4a3805..624db3f1cfe8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/MultiBucketCollector.java @@ -13,7 +13,6 @@ import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MultiCollector; import org.apache.lucene.search.Scorable; -import org.apache.lucene.search.ScoreCachingWrappingScorer; import org.apache.lucene.search.ScoreMode; import java.io.IOException; @@ -201,6 +200,7 @@ public class MultiBucketCollector extends BucketCollector { private final boolean cacheScores; private final LeafBucketCollector[] collectors; private int numCollectors; + private ScoreCachingScorable scorable; private MultiLeafBucketCollector(List collectors, boolean cacheScores) { this.collectors = collectors.toArray(new LeafBucketCollector[collectors.size()]); @@ -211,11 +211,11 @@ public class MultiBucketCollector extends BucketCollector { @Override public void setScorer(Scorable scorer) throws IOException { if (cacheScores) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); + scorable = new ScoreCachingScorable(scorer); } for (int i = 0; i < numCollectors; ++i) { final LeafCollector c = collectors[i]; - c.setScorer(scorer); + c.setScorer(cacheScores ? scorable : scorer); } } @@ -227,6 +227,9 @@ public class MultiBucketCollector extends BucketCollector { @Override public void collect(int doc, long bucket) throws IOException { + if (scorable != null) { + scorable.curDoc = doc; + } final LeafBucketCollector[] collectors = this.collectors; int numCollectors = this.numCollectors; for (int i = 0; i < numCollectors;) { @@ -244,4 +247,25 @@ public class MultiBucketCollector extends BucketCollector { } } } + + private static class ScoreCachingScorable extends Scorable { + + private final Scorable in; + private int curDoc = -1; // current document + private int scoreDoc = -1; // document that score was computed on + private float score; + + ScoreCachingScorable(Scorable in) { + this.in = in; + } + + @Override + public float score() throws IOException { + if (curDoc != scoreDoc) { + score = in.score(); + scoreDoc = curDoc; + } + return score; + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index 2e9e04eca4af..9ee15306ce63 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -278,7 +278,7 @@ public final class CompositeAggregator extends BucketsAggregator implements Size * optimization and null if index sort is not applicable. 
*/ private Sort buildIndexSortPrefix(LeafReaderContext context) throws IOException { - Sort indexSort = context.reader().getMetaData().getSort(); + Sort indexSort = context.reader().getMetaData().sort(); if (indexSort == null) { return null; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java index 0d0d2c6f922e..dcc2ad52cbc5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/GlobalOrdinalValuesSource.java @@ -41,8 +41,6 @@ import java.util.ArrayDeque; import java.util.List; import java.util.function.BiConsumer; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - /** * A {@link SingleDimensionValuesSource} for global ordinals. */ @@ -247,9 +245,8 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource { @Override public void collect(int doc, long bucket) throws IOException { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { - currentValue = ord; + for (int i = 0; i < dvs.docValueCount(); i++) { + currentValue = dvs.nextOrd(); next.collect(doc, bucket); } } else if (missingBucket) { @@ -306,8 +303,8 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource { public void collect(int doc, long bucket) throws IOException { if (currentValueIsSet == false) { if (dvs.advanceExact(doc)) { - long ord; - while ((ord = dvs.nextOrd()) != NO_MORE_ORDS) { + for (int i = 0; i < dvs.docValueCount(); i++) { + long ord = dvs.nextOrd(); if (term.equals(lookup.lookupOrd(ord))) { currentValueIsSet = true; currentValue = ord; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java index f774f67b3df8..af4d60bf424a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/countedterms/CountedTermsAggregator.java @@ -39,7 +39,6 @@ import java.util.function.BiConsumer; import java.util.function.Supplier; import static java.util.Collections.emptyList; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; class CountedTermsAggregator extends TermsAggregator { @@ -77,7 +76,8 @@ class CountedTermsAggregator extends TermsAggregator { @Override public void collect(int doc, long owningBucketOrd) throws IOException { if (ords.advanceExact(doc)) { - for (long ord = ords.nextOrd(); ord != NO_MORE_ORDS; ord = ords.nextOrd()) { + for (int i = 0; i < ords.docValueCount(); i++) { + long ord = ords.nextOrd(); collectOrdinal(bucketOrds.add(owningBucketOrd, ords.lookupOrd(ord)), doc, sub); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java index 2969b7bf82c8..7dd192b317a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/MergedPointRangeQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PointValues; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreWeight; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchNoDocsQuery; @@ -21,7 +20,6 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; -import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -117,15 +115,6 @@ public class MergedPointRangeQuery extends Query { return multiValuedSegmentWeight().count(context); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /* @@ -144,19 +133,6 @@ public class MergedPointRangeQuery extends Query { return multiValuedSegmentWeight().scorerSupplier(context); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - PointValues points = context.reader().getPointValues(field); - if (points == null) { - return null; - } - if (points.size() == points.getDocCount()) { - // Each doc that has points has exactly one point. - return singleValuedSegmentWeight().bulkScorer(context); - } - return multiValuedSegmentWeight().bulkScorer(context); - } - private Weight singleValuedSegmentWeight() throws IOException { if (singleValuedSegmentWeight == null) { singleValuedSegmentWeight = delegateForSingleValuedSegments.createWeight(searcher, scoreMode, boost); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java index 282c09c84414..e8e33655d47c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/QueryToFilterAdapter.java @@ -14,6 +14,7 @@ import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery; @@ -215,7 +216,7 @@ public class QueryToFilterAdapter { // No hits in this segment. return 0; } - scorer.score(counter, live); + scorer.score(counter, live, 0, DocIdSetIterator.NO_MORE_DOCS); return counter.readAndReset(ctx); } @@ -228,7 +229,7 @@ public class QueryToFilterAdapter { // No hits in this segment. 
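The QueryToFilterAdapter changes above account for BulkScorer#score losing its whole-segment convenience overload; callers now always pass an explicit doc id range. Scoring an entire segment looks like this (hypothetical helper):

import org.apache.lucene.search.BulkScorer;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.util.Bits;

import java.io.IOException;

// [0, NO_MORE_DOCS) covers every document in the segment, matching the old no-range overload.
static void scoreAll(BulkScorer scorer, LeafCollector collector, Bits liveDocs) throws IOException {
    scorer.score(collector, liveDocs, 0, DocIdSetIterator.NO_MORE_DOCS);
}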
return; } - scorer.score(collector, live); + scorer.score(collector, live, 0, DocIdSetIterator.NO_MORE_DOCS); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java index 4c87b5961ac1..b5d3485e72f8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/global/GlobalAggregator.java @@ -9,6 +9,7 @@ package org.elasticsearch.search.aggregations.bucket.global; import org.apache.lucene.search.BulkScorer; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Scorable; @@ -45,6 +46,7 @@ public final class GlobalAggregator extends BucketsAggregator implements SingleB return LeafBucketCollector.NO_OP_COLLECTOR; } grow(1); + scorer.score(new LeafCollector() { @Override public void collect(int doc) throws IOException { @@ -55,7 +57,7 @@ public final class GlobalAggregator extends BucketsAggregator implements SingleB public void setScorer(Scorable scorer) throws IOException { sub.setScorer(scorer); } - }, aggCtx.getLeafReaderContext().reader().getLiveDocs()); + }, aggCtx.getLeafReaderContext().reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); return LeafBucketCollector.NO_OP_COLLECTOR; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 12182a5931a4..0fbb9745aa40 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -197,7 +197,6 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA } for (; childDocId < currentParentDoc; childDocId = childDocs.nextDoc()) { - cachedScorer.doc = childDocId; for (var bucket : bucketBuffer) { collectBucket(sub, childDocId, bucket); } @@ -207,19 +206,12 @@ public class NestedAggregator extends BucketsAggregator implements SingleBucketA } private static class CachedScorable extends Scorable { - int doc; float score; @Override public final float score() { return score; } - - @Override - public int docID() { - return doc; - } - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 2f18d2dc1e42..6119af3cb6a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -157,7 +157,8 @@ public final class BinaryRangeAggregator extends BucketsAggregator { this.collector = (doc, bucket) -> { if (values.advanceExact(doc)) { int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); lo = collect(doc, ord, bucket, lo); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java index 0cfad5ba9e0c..37cee75c11b4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/BestDocsDeferringCollector.java @@ -15,7 +15,7 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; -import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.util.BigArrays; @@ -99,7 +99,7 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme // Designed to be overridden by subclasses that may score docs by criteria // other than Lucene score protected TopDocsCollector createTopDocsCollector(int size) throws IOException { - return TopScoreDocCollector.create(size, Integer.MAX_VALUE); + return new TopScoreDocCollectorManager(size, null, Integer.MAX_VALUE, false).newCollector(); } // Can be overridden by subclasses that have a different priority queue implementation @@ -214,7 +214,6 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme private final AggregationExecutionContext aggCtx; int maxDocId = Integer.MIN_VALUE; private float currentScore; - private int currentDocId = -1; private Scorable currentScorer; PerSegmentCollects(AggregationExecutionContext aggCtx) throws IOException { @@ -249,7 +248,6 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme leafCollector.setScorer(this); currentScore = 0; - currentDocId = -1; if (maxDocId < 0) { return; } @@ -259,7 +257,6 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme int rebased = scoreDoc.doc - aggCtx.getLeafReaderContext().docBase; if ((rebased >= 0) && (rebased <= maxDocId)) { currentScore = scoreDoc.score; - currentDocId = rebased; // We stored the bucket ID in Lucene's shardIndex property // for convenience. 
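BestDocsDeferringCollector above switches from the removed TopScoreDocCollector.create factory to TopScoreDocCollectorManager, calling newCollector() because it drives collection itself. When an IndexSearcher runs the search, the manager is handed over directly, roughly like this (a sketch with illustrative names):

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollectorManager;

import java.io.IOException;

// Arguments mirror the hunk above: numHits, searchAfter, totalHitsThreshold, supportsConcurrency;
// concurrency is allowed here because the searcher may create one collector per slice.
static TopDocs topHits(IndexSearcher searcher, Query query, int size) throws IOException {
    TopScoreDocCollectorManager manager = new TopScoreDocCollectorManager(size, null, Integer.MAX_VALUE, true);
    return searcher.search(query, manager);
}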
leafCollector.collect(rebased, scoreDoc.shardIndex); @@ -276,11 +273,6 @@ public class BestDocsDeferringCollector extends DeferringBucketCollector impleme return currentScore; } - @Override - public int docID() { - return currentDocId; - } - public void collect(int docId, long parentBucket) throws IOException { perBucketSamples = bigArrays.grow(perBucketSamples, parentBucket + 1); PerParentBucketSamples sampler = perBucketSamples.get((int) parentBucket); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index ef4101892a46..539b9440cea2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -118,12 +118,12 @@ public class DiversifiedOrdinalsSamplerAggregator extends SamplerAggregator { @Override public boolean advanceExact(int target) throws IOException { if (globalOrds.advanceExact(target)) { - value = globalOrds.nextOrd(); // Check there isn't a second value for this // document - if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + if (globalOrds.docValueCount() > 1) { throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); } + value = globalOrds.nextOrd(); return true; } else { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java index ed39f41d9dae..89fe1a53a01c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplingQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -76,15 +77,15 @@ public final class RandomSamplingQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SplittableRandom random = new SplittableRandom(BitMixer.mix(hash ^ seed)); int maxDoc = context.reader().maxDoc(); - return new ConstantScoreScorer( - this, + Scorer scorer = new ConstantScoreScorer( boost, ScoreMode.COMPLETE_NO_SCORES, new RandomSamplingIterator(maxDoc, p, random::nextInt) ); + return new DefaultScorerSupplier(scorer); } }; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 1b0ec8e35608..0f7c61dc9f25 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -53,7 +53,6 @@ import java.util.function.Function; import java.util.function.LongPredicate; import 
java.util.function.LongUnaryOperator; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; import static org.elasticsearch.search.aggregations.InternalOrder.isKeyOrder; /** @@ -167,7 +166,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); collectionStrategy.collectGlobalOrd(owningBucketOrd, doc, globalOrd, sub); } } @@ -179,7 +179,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr if (false == globalOrds.advanceExact(doc)) { return; } - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (false == acceptedGlobalOrdinals.test(globalOrd)) { continue; } @@ -350,7 +351,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr if (false == segmentOrds.advanceExact(doc)) { return; } - for (long segmentOrd = segmentOrds.nextOrd(); segmentOrd != NO_MORE_ORDS; segmentOrd = segmentOrds.nextOrd()) { + for (int i = 0; i < segmentOrds.docValueCount(); i++) { + long segmentOrd = segmentOrds.nextOrd(); int docCount = docCountProvider.getDocCount(doc); segmentDocCounts.increment(segmentOrd + 1, docCount); } @@ -524,7 +526,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } @@ -634,7 +637,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr if (liveDocs == null || liveDocs.get(docId)) { // document is not deleted globalOrds = globalOrds == null ? valuesSource.globalOrdinalsValues(ctx) : globalOrds; if (globalOrds.advanceExact(docId)) { - for (long globalOrd = globalOrds.nextOrd(); globalOrd != NO_MORE_ORDS; globalOrd = globalOrds.nextOrd()) { + for (int i = 0; i < globalOrds.docValueCount(); i++) { + long globalOrd = globalOrds.nextOrd(); if (accepted.find(globalOrd) >= 0) { continue; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 4d78df270474..4bcbe08ed227 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -358,8 +358,8 @@ public class IncludeExclude implements Writeable, ToXContentFragment { if (exclude != null && excludeValues != null) { throw new IllegalArgumentException(); } - this.include = include == null ? null : new RegExp(include); - this.exclude = exclude == null ? null : new RegExp(exclude); + this.include = include == null ? null : new RegExp(include, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); + this.exclude = exclude == null ? 
null : new RegExp(exclude, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT); this.includeValues = includeValues; this.excludeValues = excludeValues; this.incZeroBasedPartition = 0; @@ -529,7 +529,7 @@ public class IncludeExclude implements Writeable, ToXContentFragment { if (exclude != null) { a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } - return a; + return Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public StringFilter convertToStringFilter(DocValueFormat format) { diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java index 84cd86951770..05aa80f06448 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/CardinalityAggregator.java @@ -308,7 +308,8 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue bits = new BitArray(maxOrd, bigArrays); visitedOrds.set(bucketOrd, bits); } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java index 32be4513f5c3..d0685b3a0926 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GlobalOrdCardinalityAggregator.java @@ -259,8 +259,8 @@ public class GlobalOrdCardinalityAggregator extends NumericMetricsAggregator.Sin @Override public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues - .nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); if (bits.getAndSet(ord) == false) { competitiveIterator.onVisitedOrdinal(ord); } @@ -309,7 +309,8 @@ public class GlobalOrdCardinalityAggregator extends NumericMetricsAggregator.Sin public void collect(int doc, long bucketOrd) throws IOException { if (docValues.advanceExact(doc)) { final BitArray bits = getNewOrExistingBitArray(bucketOrd); - for (long ord = docValues.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); bits.set((int) ord); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index f7f319618fa3..8ff381cbbc84 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -135,7 +135,7 @@ public class InternalTopHits extends InternalAggregation implements TopHits { maxScore = reduceAndFindMaxScore(aggregations, shardDocs); reducedTopDocs = TopDocs.merge(from, size, shardDocs); } - assert reducedTopDocs.totalHits.relation == Relation.EQUAL_TO; + assert 
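The IncludeExclude changes above opt back into the legacy regex syntax: Lucene 10 no longer enables the '~' complement operator by default, so the parser is given explicit flags, and the parsed automaton is determinized before it is handed to consumers that require a DFA. In isolation (hypothetical helper):

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

// ALL restores the full operator set and DEPRECATED_COMPLEMENT keeps '~' working as before.
static Automaton compileLegacyRegex(String pattern) {
    RegExp regExp = new RegExp(pattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT);
    return Operations.determinize(regExp.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
}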
reducedTopDocs.totalHits.relation() == Relation.EQUAL_TO; return new InternalTopHits( getName(), @@ -262,8 +262,8 @@ public class InternalTopHits extends InternalAggregation implements TopHits { InternalTopHits other = (InternalTopHits) obj; if (from != other.from) return false; if (size != other.size) return false; - if (topDocs.topDocs.totalHits.value != other.topDocs.topDocs.totalHits.value) return false; - if (topDocs.topDocs.totalHits.relation != other.topDocs.topDocs.totalHits.relation) return false; + if (topDocs.topDocs.totalHits.value() != other.topDocs.topDocs.totalHits.value()) return false; + if (topDocs.topDocs.totalHits.relation() != other.topDocs.topDocs.totalHits.relation()) return false; if (topDocs.topDocs.scoreDocs.length != other.topDocs.topDocs.scoreDocs.length) return false; for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc thisDoc = topDocs.topDocs.scoreDocs[d]; @@ -287,8 +287,8 @@ public class InternalTopHits extends InternalAggregation implements TopHits { int hashCode = super.hashCode(); hashCode = 31 * hashCode + Integer.hashCode(from); hashCode = 31 * hashCode + Integer.hashCode(size); - hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value); - hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation.hashCode(); + hashCode = 31 * hashCode + Long.hashCode(topDocs.topDocs.totalHits.value()); + hashCode = 31 * hashCode + topDocs.topDocs.totalHits.relation().hashCode(); for (int d = 0; d < topDocs.topDocs.scoreDocs.length; d++) { ScoreDoc doc = topDocs.topDocs.scoreDocs[d]; hashCode = 31 * hashCode + doc.doc; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java index d59d824bde43..90d6c298fbd2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MetricInspectionHelper.java @@ -51,7 +51,7 @@ public class MetricInspectionHelper { } public static boolean hasValue(InternalTopHits agg) { - return (agg.getHits().getTotalHits().value == 0 + return (agg.getHits().getTotalHits().value() == 0 && Double.isNaN(agg.getHits().getMaxScore()) && Double.isNaN(agg.getTopDocs().maxScore)) == false; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java index 9d8d98bc7c7c..87d8f839dfca 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregator.java @@ -19,8 +19,10 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopScoreDocCollector; +import org.apache.lucene.search.TopScoreDocCollectorManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.search.MaxScoreCollector; import org.elasticsearch.common.lucene.Lucene; @@ -136,12 +138,14 @@ class TopHitsAggregator extends MetricsAggregator { // but here we create collectors ourselves and we need prevent OOM because of crazy an offset and size. 
topN = Math.min(topN, subSearchContext.searcher().getIndexReader().maxDoc()); if (sort == null) { - collectors = new Collectors(TopScoreDocCollector.create(topN, Integer.MAX_VALUE), null); + TopScoreDocCollector topScoreDocCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false) + .newCollector(); + collectors = new Collectors(topScoreDocCollector, null); } else { // TODO: can we pass trackTotalHits=subSearchContext.trackTotalHits(){ // Note that this would require to catch CollectionTerminatedException collectors = new Collectors( - TopFieldCollector.create(sort.sort, topN, Integer.MAX_VALUE), + new TopFieldCollectorManager(sort.sort, topN, null, Integer.MAX_VALUE, false).newCollector(), subSearchContext.trackScores() ? new MaxScoreCollector() : null ); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 4724bd0db05d..9b47507628dd 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -271,18 +271,17 @@ public enum MissingValues { if (hasOrds) { return values.nextOrd(); } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -321,7 +320,11 @@ public enum MissingValues { @Override public int docValueCount() { - return values.docValueCount(); + if (hasOrds) { + return values.docValueCount(); + } else { + return 1; + } } @Override @@ -339,12 +342,7 @@ public enum MissingValues { return ord + 1; } } else { - // we want to return the next missing ord but set this to - // NO_MORE_ORDS so on the next call we indicate there are no - // more values - long ordToReturn = nextMissingOrd; - nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; - return ordToReturn; + return nextMissingOrd; } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java index 742d366efa7a..472619da7862 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcher.java @@ -104,7 +104,7 @@ public class TimeSeriesIndexSearcher { Scorer scorer = weight.scorer(leaf); if (scorer != null) { if (minimumScore != null) { - scorer = new MinScoreScorer(weight, scorer, minimumScore); + scorer = new MinScoreScorer(scorer, minimumScore); } LeafWalker leafWalker = new LeafWalker(leaf, scorer, bucketCollector, () -> tsidOrd[0]); if (leafWalker.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java index 42b29fda3c47..769effdd6024 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java +++ 
b/server/src/main/java/org/elasticsearch/search/fetch/subphase/UnmappedFieldFetcher.java @@ -76,7 +76,7 @@ public class UnmappedFieldFetcher { for (String child : nestedChildren) { automata.add(Operations.concatenate(Automata.makeString(child + "."), Automata.makeAnyString())); } - return new CharacterRunAutomaton(Operations.union(automata)); + return new CharacterRunAutomaton(Operations.determinize(Operations.union(automata), AUTOMATON_MAX_DETERMINIZED_STATES)); } // Builds an automaton that will match any field that conforms to one of the input patterns @@ -84,7 +84,11 @@ public class UnmappedFieldFetcher { if (patterns.isEmpty()) { return null; } - return new CharacterRunAutomaton(Regex.simpleMatchToAutomaton(patterns.toArray(String[]::new)), AUTOMATON_MAX_DETERMINIZED_STATES); + Automaton a = Operations.determinize( + Regex.simpleMatchToAutomaton(patterns.toArray(String[]::new)), + AUTOMATON_MAX_DETERMINIZED_STATES + ); + return new CharacterRunAutomaton(a); } /** diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java index 1030bd87dbd1..abfcf9547276 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FragmentBuilderHelper.java @@ -40,12 +40,12 @@ public final class FragmentBuilderHelper { * than in this hack... aka. "we are are working on in!" */ final List subInfos = fragInfo.getSubInfos(); CollectionUtil.introSort(subInfos, (o1, o2) -> { - int startOffset = o1.getTermsOffsets().get(0).getStartOffset(); - int startOffset2 = o2.getTermsOffsets().get(0).getStartOffset(); + int startOffset = o1.termsOffsets().get(0).getStartOffset(); + int startOffset2 = o2.termsOffsets().get(0).getStartOffset(); return Integer.compare(startOffset, startOffset2); }); return new WeightedFragInfo( - Math.min(fragInfo.getSubInfos().get(0).getTermsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), + Math.min(fragInfo.getSubInfos().get(0).termsOffsets().get(0).getStartOffset(), fragInfo.getStartOffset()), fragInfo.getEndOffset(), subInfos, fragInfo.getTotalBoost() diff --git a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index da5d2d093fbd..78d90377cdc3 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -56,6 +56,7 @@ import java.util.PriorityQueue; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.Executor; +import java.util.stream.Collectors; /** * Context-aware extension of {@link IndexSearcher}. @@ -76,6 +77,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { private QueryProfiler profiler; private final MutableQueryTimeout cancellable; + private final boolean hasExecutor; private final int maximumNumberOfSlices; // don't create slices with less than this number of docs private final int minimumDocsPerSlice; @@ -133,6 +135,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { int minimumDocsPerSlice ) throws IOException { super(wrapWithExitableDirectoryReader ? 
new ExitableDirectoryReader((DirectoryReader) reader, cancellable) : reader, executor); + this.hasExecutor = executor != null; setSimilarity(similarity); setQueryCache(queryCache); setQueryCachingPolicy(queryCachingPolicy); @@ -141,6 +144,15 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { this.maximumNumberOfSlices = maximumNumberOfSlices; } + /** + * Whether an executor was provided at construction time or not. This indicates whether operations that support concurrency + * may be executed concurrently. It is not straightforward to deduct this from {@link #getTaskExecutor()} because {@link IndexSearcher} + * creates a {@link org.apache.lucene.search.TaskExecutor} anyways. + */ + public boolean hasExecutor() { + return hasExecutor; + } + @Override protected LeafSlice[] slices(List<LeafReaderContext> leaves) { // we offload to the executor unconditionally, including requests that don't support concurrency @@ -149,11 +161,6 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { return leafSlices; } - // package private for testing - int getMinimumDocsPerSlice() { - return minimumDocsPerSlice; - } - public void setProfiler(QueryProfiler profiler) { this.profiler = profiler; } @@ -243,7 +250,14 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { throw new IllegalArgumentException("maxSliceNum must be >= 1 (got " + maxSliceNum + ")"); } if (maxSliceNum == 1) { - return new LeafSlice[] { new LeafSlice(new ArrayList<>(leaves)) }; + return new LeafSlice[] { + new LeafSlice( + new ArrayList<>( + leaves.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ) + ) }; } // total number of documents to be searched final int numDocs = leaves.stream().mapToInt(l -> l.reader().maxDoc()).sum(); @@ -291,7 +305,11 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { for (List<LeafReaderContext> currentLeaf : queue) { // LeafSlice ctor reorders leaves so that leaves within a slice preserve the order they had within the IndexReader. // This is important given how Elasticsearch sorts leaves by descending @timestamp to get better query performance.
- slices[upto++] = new LeafSlice(currentLeaf); + slices[upto++] = new LeafSlice( + currentLeaf.stream() + .map(LeafReaderContextPartition::createForEntireSegment) + .collect(Collectors.toCollection(ArrayList::new)) + ); } return slices; @@ -344,10 +362,10 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { } final List<Callable<C>> listTasks = new ArrayList<>(leafSlices.length); for (int i = 0; i < leafSlices.length; ++i) { - final LeafReaderContext[] leaves = leafSlices[i].leaves; + final LeafReaderContextPartition[] leaves = leafSlices[i].partitions; final C collector = collectors.get(i); listTasks.add(() -> { - search(Arrays.asList(leaves), weight, collector); + search(leaves, weight, collector); return collector; }); } @@ -364,7 +382,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { * 2) handles the ES TimeExceededException */ @Override - public void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] leaves, Weight weight, Collector collector) throws IOException { boolean success = false; try { super.search(leaves, weight, collector); @@ -412,7 +430,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { } @Override - protected void searchLeaf(LeafReaderContext ctx, Weight weight, Collector collector) throws IOException { + protected void searchLeaf(LeafReaderContext ctx, int minDocId, int maxDocId, Weight weight, Collector collector) throws IOException { cancellable.checkCancelled(); final LeafCollector leafCollector; try { @@ -432,7 +450,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { bulkScorer = new CancellableBulkScorer(bulkScorer, cancellable::checkCancelled); } try { - bulkScorer.score(leafCollector, liveDocs); + bulkScorer.score(leafCollector, liveDocs, minDocId, maxDocId); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java index 3bdd7ff3630c..64b54d3623f0 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ExitableDirectoryReader.java @@ -14,9 +14,9 @@ import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FilterLeafReader; -import org.apache.lucene.index.FilterVectorValues; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.PointValues; import org.apache.lucene.index.QueryTimeout; @@ -32,6 +32,7 @@ import org.apache.lucene.util.automaton.CompiledAutomaton; import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import java.io.IOException; +import java.util.Objects; /** * Wraps an {@link IndexReader} with a {@link QueryCancellation} @@ -459,7 +460,6 @@ class ExitableDirectoryReader extends FilterDirectoryReader { } private static class ExitableByteVectorValues extends ByteVectorValues { - private int calls; private final QueryCancellation queryCancellation; private final ByteVectorValues in; @@ -479,8 +479,13 @@ class
ExitableDirectoryReader extends FilterDirectoryReader { } @Override - public byte[] vectorValue() throws IOException { - return in.vectorValue(); + public byte[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -505,33 +510,17 @@ class ExitableDirectoryReader extends FilterDirectoryReader { } @Override - public int docID() { - return in.docID(); + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); } @Override - public int nextDoc() throws IOException { - final int nextDoc = in.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; - } - - @Override - public int advance(int target) throws IOException { - final int advance = in.advance(target); - checkAndThrowWithSampling(); - return advance; - } - - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + public ByteVectorValues copy() throws IOException { + return in.copy(); } } - private static class ExitableFloatVectorValues extends FilterVectorValues { - private int calls; + private static class ExitableFloatVectorValues extends FilterFloatVectorValues { private final QueryCancellation queryCancellation; ExitableFloatVectorValues(FloatVectorValues vectorValues, QueryCancellation queryCancellation) { @@ -541,17 +530,13 @@ class ExitableDirectoryReader extends FilterDirectoryReader { } @Override - public int advance(int target) throws IOException { - final int advance = super.advance(target); - checkAndThrowWithSampling(); - return advance; + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); } @Override - public int nextDoc() throws IOException { - final int nextDoc = super.nextDoc(); - checkAndThrowWithSampling(); - return nextDoc; + public int ordToDoc(int ord) { + return in.ordToDoc(ord); } @Override @@ -575,11 +560,59 @@ class ExitableDirectoryReader extends FilterDirectoryReader { }; } - private void checkAndThrowWithSampling() { - if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { - this.queryCancellation.checkCancelled(); - } + @Override + public DocIndexIterator iterator() { + return createExitableIterator(in.iterator(), queryCancellation); } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); + } + } + + private static KnnVectorValues.DocIndexIterator createExitableIterator( + KnnVectorValues.DocIndexIterator delegate, + QueryCancellation queryCancellation + ) { + return new KnnVectorValues.DocIndexIterator() { + private int calls; + + @Override + public int index() { + return delegate.index(); + } + + @Override + public int docID() { + return delegate.docID(); + } + + @Override + public long cost() { + return delegate.cost(); + } + + @Override + public int nextDoc() throws IOException { + int nextDoc = delegate.nextDoc(); + checkAndThrowWithSampling(); + return nextDoc; + } + + @Override + public int advance(int target) throws IOException { + final int advance = delegate.advance(target); + checkAndThrowWithSampling(); + return advance; + } + + private void checkAndThrowWithSampling() { + if ((calls++ & ExitableIntersectVisitor.MAX_CALLS_BEFORE_QUERY_TIMEOUT_CHECK) == 0) { + queryCancellation.checkCancelled(); + } + } + }; } private static class ExitableDocSetIterator extends DocIdSetIterator { @@ -622,4 +655,43 @@ class 
ExitableDirectoryReader extends FilterDirectoryReader { } } } + + /** Delegates all methods to a wrapped {@link FloatVectorValues}. */ + private abstract static class FilterFloatVectorValues extends FloatVectorValues { + + /** Wrapped values */ + protected final FloatVectorValues in; + + /** Sole constructor */ + protected FilterFloatVectorValues(FloatVectorValues in) { + Objects.requireNonNull(in); + this.in = in; + } + + @Override + public DocIndexIterator iterator() { + return in.iterator(); + } + + @Override + public float[] vectorValue(int ord) throws IOException { + return in.vectorValue(ord); + } + + @Override + public FloatVectorValues copy() throws IOException { + return in.copy(); + } + + @Override + public int dimension() { + return in.dimension(); + } + + @Override + public int size() { + return in.size(); + } + + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java index 9b594e293550..f03be3f09b7d 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java +++ b/server/src/main/java/org/elasticsearch/search/internal/FieldUsageTrackingDirectoryReader.java @@ -103,15 +103,6 @@ public class FieldUsageTrackingDirectoryReader extends FilterDirectoryReader { this.notifier = notifier; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f != null) { - f = new FieldUsageTrackingTermVectorFields(f); - } - return f; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -136,11 +127,6 @@ public class FieldUsageTrackingDirectoryReader extends FilterDirectoryReader { return pointValues; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - storedFields().document(docID, visitor); - } - @Override public StoredFields storedFields() throws IOException { StoredFields storedFields = super.storedFields(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java index cd8f381e85f8..f559325063be 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileScorer.java @@ -12,7 +12,6 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.elasticsearch.search.profile.Timer; import java.io.IOException; @@ -25,15 +24,12 @@ import java.util.Collection; final class ProfileScorer extends Scorer { private final Scorer scorer; - private final ProfileWeight profileWeight; private final Timer scoreTimer, nextDocTimer, advanceTimer, matchTimer, shallowAdvanceTimer, computeMaxScoreTimer, setMinCompetitiveScoreTimer; - ProfileScorer(ProfileWeight w, Scorer scorer, QueryProfileBreakdown profile) { - super(w); + ProfileScorer(Scorer scorer, QueryProfileBreakdown profile) { this.scorer = scorer; - this.profileWeight = w; scoreTimer = profile.getNewTimer(QueryTimingType.SCORE); nextDocTimer = profile.getNewTimer(QueryTimingType.NEXT_DOC); advanceTimer = profile.getNewTimer(QueryTimingType.ADVANCE); @@ -58,11 +54,6 @@ final 
class ProfileScorer extends Scorer { } } - @Override - public Weight getWeight() { - return profileWeight; - } - @Override public Collection getChildren() throws IOException { return scorer.getChildren(); diff --git a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 27bf8ea8aae4..5d35699adec9 100644 --- a/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/server/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -37,15 +37,6 @@ public final class ProfileWeight extends Weight { this.profile = profile; } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier supplier = scorerSupplier(context); - if (supplier == null) { - return null; - } - return supplier.get(Long.MAX_VALUE); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final Timer timer = profile.getNewTimer(QueryTimingType.BUILD_SCORER); @@ -67,12 +58,24 @@ public final class ProfileWeight extends Weight { public Scorer get(long loadCost) throws IOException { timer.start(); try { - return new ProfileScorer(weight, subQueryScorerSupplier.get(loadCost), profile); + return new ProfileScorer(subQueryScorerSupplier.get(loadCost), profile); } finally { timer.stop(); } } + @Override + public BulkScorer bulkScorer() throws IOException { + // We use the default bulk scorer instead of the specialized one. The reason + // is that Lucene's BulkScorers do everything at once: finding matches, + // scoring them and calling the collector, so they make it impossible to + // see where time is spent, which is the purpose of query profiling. + // The default bulk scorer will pull a scorer and iterate over matches, + // this might be a significantly different execution path for some queries + // like disjunctions, but in general this is what is done anyway + return super.bulkScorer(); + } + @Override public long cost() { timer.start(); @@ -90,18 +93,6 @@ public final class ProfileWeight extends Weight { }; } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - // We use the default bulk scorer instead of the specialized one. The reason - // is that Lucene's BulkScorers do everything at once: finding matches, - // scoring them and calling the collector, so they make it impossible to - // see where time is spent, which is the purpose of query profiling. 
- // The default bulk scorer will pull a scorer and iterate over matches, - // this might be a significantly different execution path for some queries - // like disjunctions, but in general this is what is done anyway - return super.bulkScorer(context); - } - @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { return subQueryWeight.explain(context, doc); diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index d17cd4f69dec..af65c30b49dc 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -248,7 +248,7 @@ public class QueryPhase { } final Sort sort = sortAndFormats.sort; for (LeafReaderContext ctx : reader.leaves()) { - Sort indexSort = ctx.reader().getMetaData().getSort(); + Sort indexSort = ctx.reader().getMetaData().sort(); if (indexSort == null || Lucene.canEarlyTerminate(sort, indexSort) == false) { return false; } diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java index d1cbdd6adb76..00cf90fe1230 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java @@ -202,7 +202,11 @@ public final class QueryPhaseCollector implements TwoPhaseCollector { } }; } - return new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + LeafCollector leafCollector = new CompositeLeafCollector(postFilterBits, topDocsLeafCollector, aggsLeafCollector); + if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { + leafCollector = ScoreCachingWrappingScorer.wrap(leafCollector); + } + return leafCollector; } private static FilterScorable wrapToIgnoreMinCompetitiveScore(Scorable scorer) { @@ -263,9 +267,6 @@ public final class QueryPhaseCollector implements TwoPhaseCollector { @Override public void setScorer(Scorable scorer) throws IOException { - if (cacheScores && topDocsLeafCollector != null && aggsLeafCollector != null) { - scorer = ScoreCachingWrappingScorer.wrap(scorer); - } // Ignore calls to setMinCompetitiveScore so that if the top docs collector // wants to skip low-scoring hits, the aggs collector still sees all hits. // this is important also for terminate_after in case used when total hits tracking is early terminated. 
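The ProfileWeight hunk above and the RankDocsQuery, script-field-query and slice-query hunks below all apply the same Lucene 10 migration: Weight subclasses now implement scorerSupplier(LeafReaderContext) instead of overriding scorer(LeafReaderContext) or bulkScorer(LeafReaderContext), and Scorer constructors no longer take a Weight. A minimal sketch of that pattern follows; it is an illustration only, assuming a hypothetical ExampleMatchAllQuery that is not part of this change set.

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;

/** Hypothetical match-all query, shown only to illustrate the Lucene 10 scorerSupplier pattern. */
final class ExampleMatchAllQuery extends Query {

    @Override
    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
        return new ConstantScoreWeight(this, boost) {
            @Override
            public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
                // Lucene 10: overriding Weight#scorer/#bulkScorer is replaced by implementing
                // scorerSupplier, and Scorer constructors no longer take a Weight.
                Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc()));
                return new DefaultScorerSupplier(scorer);
            }

            @Override
            public boolean isCacheable(LeafReaderContext ctx) {
                return true;
            }
        };
    }

    @Override
    public String toString(String field) {
        return "ExampleMatchAllQuery";
    }

    @Override
    public void visit(QueryVisitor visitor) {
        visitor.visitLeaf(this);
    }

    @Override
    public boolean equals(Object other) {
        return sameClassAs(other);
    }

    @Override
    public int hashCode() {
        return classHash();
    }
}

Wrapping the eagerly built Scorer in Weight.DefaultScorerSupplier keeps the old behaviour; weights that benefit from deferral can instead implement ScorerSupplier directly to delay work until get(leadCost) or to override bulkScorer(), as the ProfileWeight and VectorSimilarityQuery hunks in this patch do.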
diff --git a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java index b78d9e40ba12..2cb960e7e73c 100644 --- a/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java +++ b/server/src/main/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQuery.java @@ -13,7 +13,6 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexSearcher; @@ -106,12 +105,12 @@ public class RankDocsQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { /** * We return a scorer even if there are no ranked documents within the segment. * This ensures the correct propagation of the maximum score. */ - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -180,6 +179,7 @@ public class RankDocsQuery extends Query { } }; + return new DefaultScorerSupplier(scorer); } @Override @@ -325,11 +325,6 @@ public class RankDocsQuery extends Query { return topWeight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return combinedWeight.scorer(context); - } - @Override public boolean isCacheable(LeafReaderContext ctx) { return combinedWeight.isCacheable(ctx); @@ -340,11 +335,6 @@ public class RankDocsQuery extends Query { return combinedWeight.matches(context, doc); } - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - return combinedWeight.bulkScorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return combinedWeight.scorerSupplier(context); diff --git a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java index 5077d68c12ba..c65c2bb6650c 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/AbstractScriptFieldQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractFieldScript; @@ -69,10 +70,11 @@ public abstract class AbstractScriptFieldQuery ex } @Override - public Scorer scorer(LeafReaderContext ctx) { + public ScorerSupplier scorerSupplier(LeafReaderContext ctx) throws IOException { S scriptContext = scriptContextFunction.apply(ctx); DocIdSetIterator approximation = DocIdSetIterator.all(ctx.reader().maxDoc()); - return new ConstantScoreScorer(this, score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, createTwoPhaseIterator(scriptContext, approximation)); + return new 
DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java index 751ecb18cc68..430d22ebc908 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/GeoPointScriptFieldDistanceFeatureQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.SloppyMath; @@ -79,8 +80,9 @@ public final class GeoPointScriptFieldDistanceFeatureQuery extends AbstractScrip } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + return new DefaultScorerSupplier(scorer); } @Override @@ -116,8 +118,7 @@ public final class GeoPointScriptFieldDistanceFeatureQuery extends AbstractScrip private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java index 7c8ac4a8cae6..d18098ee7de3 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/LongScriptFieldDistanceFeatureQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.elasticsearch.script.AbstractLongFieldScript; @@ -56,8 +57,10 @@ public final class LongScriptFieldDistanceFeatureQuery extends AbstractScriptFie } @Override - public Scorer scorer(LeafReaderContext context) { - return new DistanceScorer(this, scriptContextFunction().apply(context), context.reader().maxDoc(), boost); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + return new DefaultScorerSupplier( + new DistanceScorer(scriptContextFunction().apply(context), context.reader().maxDoc(), boost) + ); } @Override @@ -84,8 +87,7 @@ public final class LongScriptFieldDistanceFeatureQuery extends AbstractScriptFie private final DocIdSetIterator disi; private final float weight; - protected DistanceScorer(Weight weight, AbstractLongFieldScript script, int maxDoc, float boost) { - super(weight); + protected DistanceScorer(AbstractLongFieldScript 
script, int maxDoc, float boost) { this.script = script; twoPhase = new TwoPhaseIterator(DocIdSetIterator.all(maxDoc)) { @Override diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java index ab32427ed4ac..3c5931367370 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldRegexpQuery.java @@ -10,6 +10,7 @@ package org.elasticsearch.search.runtime; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -34,7 +35,12 @@ public class StringScriptFieldRegexpQuery extends AbstractStringScriptFieldAutom script, leafFactory, fieldName, - new ByteRunAutomaton(new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(maxDeterminizedStates)) + new ByteRunAutomaton( + Operations.determinize( + new RegExp(Objects.requireNonNull(pattern), syntaxFlags, matchFlags).toAutomaton(), + maxDeterminizedStates + ) + ) ); this.pattern = pattern; this.syntaxFlags = syntaxFlags; diff --git a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java index 5bacaf0d36b5..6c1aa6f72c4a 100644 --- a/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java +++ b/server/src/main/java/org/elasticsearch/search/runtime/StringScriptFieldWildcardQuery.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.lucene.search.AutomatonQueries; import org.elasticsearch.script.Script; import org.elasticsearch.script.StringFieldScript; @@ -44,7 +45,7 @@ public class StringScriptFieldWildcardQuery extends AbstractStringScriptFieldAut if (caseInsensitive) { return AutomatonQueries.toCaseInsensitiveWildcardAutomaton(term); } - return WildcardQuery.toAutomaton(term); + return WildcardQuery.toAutomaton(term, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java index f4ab7e29e168..6de888ac8aff 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocIdSliceQuery.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.search.sort.FieldSortBuilder; @@ -58,9 +59,10 @@ public final class DocIdSliceQuery extends SliceQuery { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { DocIdSetIterator iterator = createIterator(context, 
sliceStart, sliceStart + sliceSize); - return new ConstantScoreScorer(this, boost, scoreMode, iterator); + Scorer scorer = new ConstantScoreScorer(boost, scoreMode, iterator); + return new DefaultScorerSupplier(scorer); } private static DocIdSetIterator createIterator(LeafReaderContext context, int sliceStart, int sliceEnd) { diff --git a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index b66ae219ace9..05cf173468fd 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -41,7 +42,7 @@ public final class DocValuesSliceQuery extends SliceQuery { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @@ -66,7 +67,8 @@ public final class DocValuesSliceQuery extends SliceQuery { return 10; } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, twoPhase); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index 3927f54461bb..9aecbfdd84ee 100644 --- a/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/server/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -21,6 +21,7 @@ import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.DocIdSetBuilder; @@ -49,10 +50,11 @@ public final class TermsSliceQuery extends SliceQuery { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final DocIdSet disi = build(context.reader()); final DocIdSetIterator leafIt = disi.iterator(); - return new ConstantScoreScorer(this, score(), scoreMode, leafIt); + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, leafIt); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java index a6fd4ef90693..e60e534d6aca 100644 --- 
a/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/sort/ScoreSortBuilder.java @@ -126,7 +126,6 @@ public final class ScoreSortBuilder extends SortBuilder { @Override protected boolean advanceExact(int doc) throws IOException { - assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]"; /* We will never be called by documents that don't match the * query and they'll all have a score, thus `true`. */ score = scorer.score(); diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index 8fdc33f38934..fd6cfeaea639 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -11,6 +11,7 @@ package org.elasticsearch.search.suggest.completion; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.CollectionTerminatedException; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Weight; @@ -84,7 +85,7 @@ public class CompletionSuggester extends Suggester LeafCollector leafCollector = null; try { leafCollector = collector.getLeafCollector(context); - scorer.score(leafCollector, context.reader().getLiveDocs()); + scorer.score(leafCollector, context.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); } catch (CollectionTerminatedException e) { // collection was terminated prematurely // continue with the following leaf diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java index 1366da366b06..ed8197786ba7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java @@ -133,7 +133,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { * because that's what {@link DirectSpellChecker#suggestSimilar} expects * when filtering terms. 
*/ - int threshold = thresholdTermFrequency(original.termStats.docFreq); + int threshold = thresholdTermFrequency(original.termStats.docFreq()); if (threshold == Integer.MAX_VALUE) { // the threshold is the max possible frequency so we can skip the search return set; @@ -226,7 +226,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator { } private static double score(TermStats termStats, double errorScore, long dictionarySize) { - return errorScore * (((double) termStats.totalTermFreq + 1) / ((double) dictionarySize + 1)); + return errorScore * (((double) termStats.totalTermFreq() + 1) / ((double) dictionarySize + 1)); } // package protected for test diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java index a47cd5fe5a84..0fd3ebcd0086 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LaplaceScorer.java @@ -36,7 +36,7 @@ final class LaplaceScorer extends WordScorer { @Override protected double scoreBigram(Candidate word, Candidate w_1) throws IOException { join(separator, spare, w_1.term, word.term); - return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq + alpha * numTerms); + return (alpha + frequency(spare.get())) / (w_1.termStats.totalTermFreq() + alpha * numTerms); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java index fe64a6549877..0d6631130308 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/LinearInterpolatingScorer.java @@ -58,7 +58,7 @@ public final class LinearInterpolatingScorer extends WordScorer { if (count < 1) { return unigramLambda * scoreUnigram(word); } - return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq)) + unigramLambda * scoreUnigram(word); + return bigramLambda * (count / (0.5d + w_1.termStats.totalTermFreq())) + unigramLambda * scoreUnigram(word); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java index 7257a0d97245..21d1f34b68ee 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/NoisyChannelSpellChecker.java @@ -70,7 +70,7 @@ final class NoisyChannelSpellChecker { if (posIncAttr.getPositionIncrement() == 0 && typeAttribute.type() == SynonymFilter.TYPE_SYNONYM) { assert currentSet != null; TermStats termStats = generator.termStats(term); - if (termStats.docFreq > 0) { + if (termStats.docFreq() > 0) { currentSet.addOneCandidate(generator.createCandidate(BytesRef.deepCopyOf(term), termStats, realWordLikelihood)); } } else { diff --git a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java index d893e0986e0d..270866c14b20 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/phrase/StupidBackoffScorer.java @@ -35,7 
+35,7 @@ class StupidBackoffScorer extends WordScorer { if (count < 1) { return discount * scoreUnigram(word); } - return count / (w_1.termStats.totalTermFreq + 0.00000000001d); + return count / (w_1.termStats.totalTermFreq() + 0.00000000001d); } @Override @@ -50,7 +50,7 @@ class StupidBackoffScorer extends WordScorer { join(separator, spare, w_2.term, w_1.term, w.term); long trigramCount = frequency(spare.get()); if (trigramCount < 1) { - return discount * (bigramCount / (w_1.termStats.totalTermFreq + 0.00000000001d)); + return discount * (bigramCount / (w_1.termStats.totalTermFreq() + 0.00000000001d)); } return trigramCount / (bigramCount + 0.00000000001d); } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java index 44bbd0f50951..31e19b678475 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/DenseVectorQuery.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.VectorScorer; import org.apache.lucene.search.Weight; @@ -70,12 +71,12 @@ public abstract class DenseVectorQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { - VectorScorer vectorScorer = vectorScorer(leafReaderContext); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + VectorScorer vectorScorer = vectorScorer(context); if (vectorScorer == null) { return null; } - return new DenseVectorScorer(this, vectorScorer); + return new DefaultScorerSupplier(new DenseVectorScorer(vectorScorer, boost)); } @Override @@ -178,11 +179,10 @@ public abstract class DenseVectorQuery extends Query { private final DocIdSetIterator iterator; private final float boost; - DenseVectorScorer(DenseVectorWeight weight, VectorScorer vectorScorer) { - super(weight); + DenseVectorScorer(VectorScorer vectorScorer, float boost) { this.vectorScorer = vectorScorer; this.iterator = vectorScorer.iterator(); - this.boost = weight.boost; + this.boost = boost; } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java index 413840f2b451..9f3d83b4da08 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenByteKnnVectorQuery.java @@ -34,7 +34,7 @@ public class ESDiversifyingChildrenByteKnnVectorQuery extends DiversifyingChildr @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java index 80704a3b552f..3907bdf89bc6 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESDiversifyingChildrenFloatKnnVectorQuery.java @@ -34,7 +34,7 @@ public class ESDiversifyingChildrenFloatKnnVectorQuery extends DiversifyingChild @Override protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java index 14bb94a366e5..9363f67a7350 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnByteVectorQuery.java @@ -27,7 +27,7 @@ public class ESKnnByteVectorQuery extends KnnByteVectorQuery implements Profilin protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java index 590d8cfbbaba..be0437af9131 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/ESKnnFloatVectorQuery.java @@ -27,7 +27,7 @@ public class ESKnnFloatVectorQuery extends KnnFloatVectorQuery implements Profil protected TopDocs mergeLeafResults(TopDocs[] perLeafResults) { // if k param is set, we get only top k results from each shard TopDocs topK = kParam == null ? 
super.mergeLeafResults(perLeafResults) : TopDocs.merge(kParam, perLeafResults); - vectorOpsCount = topK.totalHits.value; + vectorOpsCount = topK.totalHits.value(); return topK; } diff --git a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java index 06fb109d6580..bb83b8528c6c 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/KnnScoreDocQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import java.io.IOException; @@ -88,13 +89,13 @@ public class KnnScoreDocQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { // Segment starts indicate how many docs are in the segment, // upper equalling lower indicates no documents for this segment if (segmentStarts[context.ord] == segmentStarts[context.ord + 1]) { return null; } - return new Scorer(this) { + Scorer scorer = new Scorer() { final int lower = segmentStarts[context.ord]; final int upper = segmentStarts[context.ord + 1]; int upTo = -1; @@ -177,6 +178,7 @@ public class KnnScoreDocQuery extends Query { } }; + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java index 77f60adc4fcd..5219778047bc 100644 --- a/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java +++ b/server/src/main/java/org/elasticsearch/search/vectors/VectorSimilarityQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.elasticsearch.common.lucene.search.function.MinScoreScorer; @@ -142,12 +143,22 @@ public class VectorSimilarityQuery extends Query { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - Scorer innerScorer = in.scorer(context); - if (innerScorer == null) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = in.scorerSupplier(context); + if (inScorerSupplier == null) { return null; } - return new MinScoreScorer(this, innerScorer, docScore, boost); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new MinScoreScorer(inScorerSupplier.get(leadCost), docScore, boost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index b14116b3d55b..c760e8043e26 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -230,7 +230,7 @@ public class SynonymsManagementAPIService { 
client.prepareSearch(SYNONYMS_ALIAS_NAME) .setSource(new SearchSourceBuilder().size(0).trackTotalHits(true)) .execute(listener.delegateFailureAndWrap((searchListener, countResponse) -> { - long totalSynonymRules = countResponse.getHits().getTotalHits().value; + long totalSynonymRules = countResponse.getHits().getTotalHits().value(); if (totalSynonymRules > maxSynonymsSets) { logger.warn( "The number of synonym rules in the synonym set [{}] exceeds the maximum allowed." @@ -265,7 +265,7 @@ public class SynonymsManagementAPIService { .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(new DelegatingIndexNotFoundActionListener<>(synonymSetId, listener, (searchListener, searchResponse) -> { - final long totalSynonymRules = searchResponse.getHits().getTotalHits().value; + final long totalSynonymRules = searchResponse.getHits().getTotalHits().value(); // If there are no rules, check that the synonym set actually exists to return the proper error if (totalSynonymRules == 0) { checkSynonymSetExists(synonymSetId, searchListener.delegateFailure((existsListener, response) -> { @@ -383,7 +383,7 @@ public class SynonymsManagementAPIService { .setPreference(Preference.LOCAL.type()) .setTrackTotalHits(true) .execute(l1.delegateFailureAndWrap((searchListener, searchResponse) -> { - long synonymsSetSize = searchResponse.getHits().getTotalHits().value; + long synonymsSetSize = searchResponse.getHits().getTotalHits().value(); if (synonymsSetSize >= maxSynonymsSets) { listener.onFailure( new IllegalArgumentException("The number of synonym rules in a synonyms set cannot exceed " + maxSynonymsSets) diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 4e85ba2cf479..33c808197120 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -1,2 +1,3 @@ org.elasticsearch.index.codec.Elasticsearch814Codec org.elasticsearch.index.codec.Elasticsearch816Codec +org.elasticsearch.index.codec.Elasticsearch900Codec diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 65464c7f14a5..bf4a28b9c60b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -12,8 +12,8 @@ package org.elasticsearch.action.admin.indices.diskusage; import org.apache.lucene.codecs.DocValuesFormat; import org.apache.lucene.codecs.KnnVectorsFormat; import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; @@ -53,6 +53,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import 
org.apache.lucene.search.suggest.document.Completion912PostingsFormat; import org.apache.lucene.search.suggest.document.CompletionPostingsFormat; @@ -327,7 +328,7 @@ public class IndexDiskUsageAnalyzerTests extends ESTestCase { public void testCompletionField() throws Exception { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(false) - .setCodec(new Lucene912Codec(Lucene912Codec.Mode.BEST_SPEED) { + .setCodec(new Lucene100Codec(Lucene100Codec.Mode.BEST_SPEED) { @Override public PostingsFormat getPostingsFormatForField(String field) { if (field.startsWith("suggest_")) { @@ -414,25 +415,25 @@ public class IndexDiskUsageAnalyzerTests extends ESTestCase { enum CodecMode { BEST_SPEED { @Override - Lucene912Codec.Mode mode() { - return Lucene912Codec.Mode.BEST_SPEED; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_SPEED; } }, BEST_COMPRESSION { @Override - Lucene912Codec.Mode mode() { - return Lucene912Codec.Mode.BEST_COMPRESSION; + Lucene100Codec.Mode mode() { + return Lucene100Codec.Mode.BEST_COMPRESSION; } }; - abstract Lucene912Codec.Mode mode(); + abstract Lucene100Codec.Mode mode(); } static void indexRandomly(Directory directory, CodecMode codecMode, int numDocs, Consumer addFields) throws IOException { IndexWriterConfig config = new IndexWriterConfig().setCommitOnClose(true) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(codecMode.mode())); + .setCodec(new Lucene100Codec(codecMode.mode())); try (IndexWriter writer = new IndexWriter(directory, config)) { for (int i = 0; i < numDocs; i++) { final Document doc = new Document(); @@ -640,7 +641,7 @@ public class IndexDiskUsageAnalyzerTests extends ESTestCase { try (DirectoryReader reader = DirectoryReader.open(source)) { IndexWriterConfig config = new IndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) .setUseCompoundFile(randomBoolean()) - .setCodec(new Lucene912Codec(mode.mode()) { + .setCodec(new Lucene100Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { return new ES812PostingsFormat(); @@ -687,7 +688,7 @@ public class IndexDiskUsageAnalyzerTests extends ESTestCase { final String[] files; final Directory directory; if (sis.getUseCompoundFile()) { - directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.READ); + directory = sis.getCodec().compoundFormat().getCompoundReader(reader.directory(), sis, IOContext.DEFAULT); files = directory.listAll(); } else { directory = reader.directory(); @@ -785,14 +786,15 @@ public class IndexDiskUsageAnalyzerTests extends ESTestCase { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, 1.0f) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { final FixedBitSet bits = new FixedBitSet(context.reader().maxDoc()); for (int i = 0; i < bits.length(); i++) { if (randomBoolean()) { bits.set(i); } } - return new ConstantScoreScorer(this, 1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + Scorer scorer = new ConstantScoreScorer(1.0f, ScoreMode.COMPLETE_NO_SCORES, new BitSetIterator(bits, bits.length())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java 
b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java index b7919878f908..681d9d000bee 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountOnlyQueryPhaseResultConsumerTests.java @@ -79,8 +79,8 @@ public class CountOnlyQueryPhaseResultConsumerTests extends ESTestCase { queryPhaseResultConsumer.consumeResult(querySearchResult, nextCounter::incrementAndGet); } var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertFalse(reducePhase.isEmptyResult()); assertEquals(10, nextCounter.get()); } @@ -94,8 +94,8 @@ public class CountOnlyQueryPhaseResultConsumerTests extends ESTestCase { ) ) { var reducePhase = queryPhaseResultConsumer.reduce(); - assertEquals(0, reducePhase.totalHits().value); - assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation); + assertEquals(0, reducePhase.totalHits().value()); + assertEquals(TotalHits.Relation.EQUAL_TO, reducePhase.totalHits().relation()); assertTrue(reducePhase.isEmptyResult()); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 90174a89209b..99401e8a8d40 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -151,11 +151,11 @@ public class DfsQueryPhaseTests extends ESTestCase { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNotNull(responseRef.get().get(1)); assertNull(responseRef.get().get(1).fetchResult()); - assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(1).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(84, responseRef.get().get(1).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertTrue(mockSearchPhaseContext.releasedSearchContexts.isEmpty()); assertEquals(2, mockSearchPhaseContext.numSuccess.get()); @@ -236,7 +236,7 @@ public class DfsQueryPhaseTests extends ESTestCase { assertNotNull(responseRef.get()); assertNotNull(responseRef.get().get(0)); assertNull(responseRef.get().get(0).fetchResult()); - assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value); + assertEquals(1, responseRef.get().get(0).queryResult().topDocs().topDocs.totalHits.value()); assertEquals(42, responseRef.get().get(0).queryResult().topDocs().topDocs.scoreDocs[0].doc); assertNull(responseRef.get().get(1)); diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 31ef57482cab..09dd7821cd12 100644 --- 
a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -116,7 +116,7 @@ public class FetchSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); if (numHits != 0) { assertEquals(42, searchResponse.getHits().getAt(0).docId()); } @@ -244,7 +244,7 @@ public class FetchSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(42, searchResponse.getHits().getAt(1).docId()); assertEquals(0, searchResponse.getFailedShards()); @@ -353,7 +353,7 @@ public class FetchSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(1, searchResponse.getFailedShards()); assertEquals(1, searchResponse.getSuccessfulShards()); @@ -468,7 +468,7 @@ public class FetchSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(numHits, searchResponse.getHits().getTotalHits().value); + assertEquals(numHits, searchResponse.getHits().getTotalHits().value()); assertEquals(Math.min(numHits, resultSetSize), searchResponse.getHits().getHits().length); SearchHit[] hits = searchResponse.getHits().getHits(); for (int i = 0; i < hits.length; i++) { @@ -703,7 +703,7 @@ public class FetchSearchPhaseTests extends ESTestCase { mockSearchPhaseContext.assertNoFailure(); SearchResponse searchResponse = mockSearchPhaseContext.searchResponse.get(); assertNotNull(searchResponse); - assertEquals(2, searchResponse.getHits().getTotalHits().value); + assertEquals(2, searchResponse.getHits().getTotalHits().value()); assertEquals(1, searchResponse.getHits().getHits().length); assertEquals(84, searchResponse.getHits().getAt(0).docId()); assertEquals(0, searchResponse.getFailedShards()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 857402d1baaa..9a507977c012 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -297,8 +297,8 @@ public class SearchPhaseControllerTests extends ESTestCase { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + 
assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } for (SearchHit hit : mergedResponse.hits().getHits()) { SearchPhaseResult searchPhaseResult = fetchResults.get(hit.getShard().getShardId().id()); @@ -415,8 +415,8 @@ public class SearchPhaseControllerTests extends ESTestCase { if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) { assertNull(mergedResponse.hits.getTotalHits()); } else { - assertThat(mergedResponse.hits.getTotalHits().value, equalTo(0L)); - assertEquals(mergedResponse.hits.getTotalHits().relation, Relation.EQUAL_TO); + assertThat(mergedResponse.hits.getTotalHits().value(), equalTo(0L)); + assertEquals(mergedResponse.hits.getTotalHits().relation(), Relation.EQUAL_TO); } int rank = 1; for (SearchHit hit : mergedResponse.hits().getHits()) { @@ -522,8 +522,8 @@ public class SearchPhaseControllerTests extends ESTestCase { int resultCount = 0; for (SearchPhaseResult shardResult : results.asList()) { TopDocs topDocs = shardResult.queryResult().topDocs().topDocs; - assert topDocs.totalHits.relation == Relation.EQUAL_TO; - resultCount += (int) topDocs.totalHits.value; + assert topDocs.totalHits.relation() == Relation.EQUAL_TO; + resultCount += (int) topDocs.totalHits.value(); } return resultCount; } @@ -784,7 +784,7 @@ public class SearchPhaseControllerTests extends ESTestCase { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -844,7 +844,7 @@ public class SearchPhaseControllerTests extends ESTestCase { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(0, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -902,7 +902,7 @@ public class SearchPhaseControllerTests extends ESTestCase { assertAggReduction(request); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); @@ -969,7 +969,7 @@ public class SearchPhaseControllerTests extends ESTestCase { ScoreDoc[] scoreDocs = reduce.sortedTopDocs().scoreDocs(); assertEquals(5, scoreDocs.length); assertEquals(100.f, reduce.maxScore(), 0.0f); - assertEquals(12, reduce.totalHits().value); + assertEquals(12, reduce.totalHits().value()); assertEquals(95.0f, scoreDocs[0].score, 0.0f); assertEquals(94.0f, scoreDocs[1].score, 0.0f); assertEquals(93.0f, scoreDocs[2].score, 0.0f); @@ -1022,7 +1022,7 @@ public class SearchPhaseControllerTests extends ESTestCase { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); 
assertAggReduction(request); assertEquals(Math.min(expectedNumResults, size), reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertTrue(reduce.sortedTopDocs().isSortedByField()); assertEquals(1, reduce.sortedTopDocs().sortFields().length); @@ -1079,7 +1079,7 @@ public class SearchPhaseControllerTests extends ESTestCase { SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce(); assertAggReduction(request); assertEquals(3, reduce.sortedTopDocs().scoreDocs().length); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(a, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[0]).fields[0]); assertEquals(b, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[1]).fields[0]); assertEquals(c, ((FieldDoc) reduce.sortedTopDocs().scoreDocs()[2]).fields[0]); @@ -1199,7 +1199,7 @@ public class SearchPhaseControllerTests extends ESTestCase { assertEquals(maxScoreCompletion, reduce.sortedTopDocs().scoreDocs()[0].score, 0f); assertEquals(0, reduce.sortedTopDocs().scoreDocs()[0].doc); assertNotEquals(-1, reduce.sortedTopDocs().scoreDocs()[0].shardIndex); - assertEquals(0, reduce.totalHits().value); + assertEquals(0, reduce.totalHits().value()); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); assertNull(reduce.sortedTopDocs().collapseField()); @@ -1290,7 +1290,7 @@ public class SearchPhaseControllerTests extends ESTestCase { assertEquals(max.get(), internalMax.value(), 0.0D); assertEquals(1, reduce.sortedTopDocs().scoreDocs().length); assertEquals(max.get(), reduce.maxScore(), 0.0f); - assertEquals(expectedNumResults, reduce.totalHits().value); + assertEquals(expectedNumResults, reduce.totalHits().value()); assertEquals(max.get(), reduce.sortedTopDocs().scoreDocs()[0].score, 0.0f); assertFalse(reduce.sortedTopDocs().isSortedByField()); assertNull(reduce.sortedTopDocs().sortFields()); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index d279fa5030a8..e4284937474c 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -230,11 +230,11 @@ public class SearchQueryThenFetchAsyncActionTests extends ESTestCase { SearchPhaseController.ReducedQueryPhase phase = action.results.reduce(); assertThat(phase.numReducePhases(), greaterThanOrEqualTo(1)); if (withScroll) { - assertThat(phase.totalHits().value, equalTo((long) numShards)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo((long) numShards)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } else { - assertThat(phase.totalHits().value, equalTo(2L)); - assertThat(phase.totalHits().relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(phase.totalHits().value(), equalTo(2L)); + assertThat(phase.totalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); } assertThat(phase.sortedTopDocs().scoreDocs().length, equalTo(1)); 
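The assertions above and below repeatedly swap totalHits.value/relation field reads for method calls: in Lucene 10 TotalHits is a record, so its components are read through accessors. A minimal standalone sketch of the pattern; the count and relation are arbitrary values, not taken from these tests.

import org.apache.lucene.search.TotalHits;

// TotalHits is a record in Lucene 10: the former public fields are now accessors.
public class TotalHitsAccessors {
    public static void main(String[] args) {
        TotalHits totalHits = new TotalHits(12L, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);
        long value = totalHits.value();                     // was: totalHits.value
        TotalHits.Relation relation = totalHits.relation(); // was: totalHits.relation
        System.out.println(value + " hits, relation=" + relation);
    }
}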
assertThat(phase.sortedTopDocs().scoreDocs()[0], instanceOf(FieldDoc.class)); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java index 2b0ed0552e59..51796f404c28 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java @@ -762,11 +762,11 @@ public class SearchResponseMergerTests extends ESTestCase { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } - final int numDocs = totalHits == null || totalHits.value >= requestedSize ? requestedSize : (int) totalHits.value; + final int numDocs = totalHits == null || totalHits.value() >= requestedSize ? requestedSize : (int) totalHits.value(); int scoreFactor = randomIntBetween(1, numResponses); float maxScore = scoreSort ? numDocs * scoreFactor : Float.NaN; SearchHit[] hits = randomSearchHitArray( @@ -862,8 +862,8 @@ public class SearchResponseMergerTests extends ESTestCase { assertNull(searchHits.getTotalHits()); } else { assertNotNull(searchHits.getTotalHits()); - assertEquals(expectedTotalHits.value, searchHits.getTotalHits().value); - assertSame(expectedTotalHits.relation, searchHits.getTotalHits().relation); + assertEquals(expectedTotalHits.value(), searchHits.getTotalHits().value()); + assertSame(expectedTotalHits.relation(), searchHits.getTotalHits().relation()); } if (expectedMaxScore == Float.NEGATIVE_INFINITY) { assertTrue(Float.isNaN(searchHits.getMaxScore())); @@ -910,9 +910,9 @@ public class SearchResponseMergerTests extends ESTestCase { assertEquals(0, response.getNumReducePhases()); assertFalse(response.isTimedOut()); assertNotNull(response.getHits().getTotalHits()); - assertEquals(0, response.getHits().getTotalHits().value); + assertEquals(0, response.getHits().getTotalHits().value()); assertEquals(0, response.getHits().getHits().length); - assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation); + assertEquals(TotalHits.Relation.EQUAL_TO, response.getHits().getTotalHits().relation()); assertNull(response.getScrollId()); assertSame(InternalAggregations.EMPTY, response.getAggregations()); assertNull(response.getSuggest()); @@ -1004,7 +1004,7 @@ public class SearchResponseMergerTests extends ESTestCase { assertEquals(2, merger.numResponses()); SearchResponse mergedResponse = merger.getMergedResponse(clusters); try { - assertEquals(10, mergedResponse.getHits().getTotalHits().value); + assertEquals(10, mergedResponse.getHits().getTotalHits().value()); assertEquals(10, mergedResponse.getHits().getHits().length); assertEquals(2, mergedResponse.getTotalShards()); assertEquals(2, mergedResponse.getSuccessfulShards()); @@ -1032,8 +1032,8 @@ public class SearchResponseMergerTests extends ESTestCase { TotalHits totalHits = null; if (trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED) { totalHits = new TotalHits(randomLongBetween(0, 
1000), totalHitsRelation); - long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value; - expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation); + long previousValue = expectedTotalHits == null ? 0 : expectedTotalHits.value(); + expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value(), trackTotalHitsUpTo), totalHitsRelation); } SearchResponse searchResponse = new SearchResponse( SearchHits.empty(totalHits, Float.NaN), @@ -1232,7 +1232,7 @@ public class SearchResponseMergerTests extends ESTestCase { SearchResponse mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(2L)); // should be 2 hits from remote1 + assertThat(hits.getTotalHits().value(), equalTo(2L)); // should be 2 hits from remote1 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ { @@ -1273,7 +1273,7 @@ public class SearchResponseMergerTests extends ESTestCase { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1414,7 +1414,7 @@ public class SearchResponseMergerTests extends ESTestCase { mergedResponse = searchResponseMerger.getMergedResponse(clusters); try { SearchHits hits = mergedResponse.getHits(); - assertThat(hits.getTotalHits().value, equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 + assertThat(hits.getTotalHits().value(), equalTo(4L)); // should be 2 hits from remote1, 2 from remote2 SearchHit hit1 = hits.getHits()[0]; String expectedHit1 = """ @@ -1483,7 +1483,7 @@ public class SearchResponseMergerTests extends ESTestCase { private SearchHits createSimpleDeterministicSearchHits(String clusterAlias, Index[] indices) { TotalHits totalHits = new TotalHits(2, TotalHits.Relation.EQUAL_TO); - final int numDocs = (int) totalHits.value; + final int numDocs = (int) totalHits.value(); int scoreFactor = 1; float maxScore = numDocs; int numFields = 1; diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index afc4c6e9eccb..bbeae6b19b8a 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -621,8 +621,8 @@ public class SearchResponseTests extends ESTestCase { if (searchResponse.getHits().getTotalHits() == null) { assertNull(deserialized.getHits().getTotalHits()); } else { - assertEquals(searchResponse.getHits().getTotalHits().value, deserialized.getHits().getTotalHits().value); - assertEquals(searchResponse.getHits().getTotalHits().relation, deserialized.getHits().getTotalHits().relation); + assertEquals(searchResponse.getHits().getTotalHits().value(), deserialized.getHits().getTotalHits().value()); + assertEquals(searchResponse.getHits().getTotalHits().relation(), deserialized.getHits().getTotalHits().relation()); } assertEquals(searchResponse.getHits().getHits().length, deserialized.getHits().getHits().length); assertEquals(searchResponse.getNumReducePhases(), deserialized.getNumReducePhases()); diff --git 
a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index c00fece68652..1b86e5b00000 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -400,6 +400,6 @@ public abstract class AbstractTermVectorsTestCase extends ESIntegTestCase { ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(1, scoreDocs.length); - return directoryReader.getTermVectors(scoreDocs[0].doc); + return directoryReader.termVectors().get(scoreDocs[0].doc); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index b8804e9160a7..05382de49087 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -9,6 +9,8 @@ package org.elasticsearch.cluster.metadata; +import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.TransportVersion; @@ -602,6 +604,8 @@ public class MetadataCreateIndexServiceTests extends ESTestCase { public void testValidateDotIndex() { List systemIndexDescriptors = new ArrayList<>(); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-one*", "test")); + Automaton patternAutomaton = new RegExp("\\.test-~(one.*)", RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(); + systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".test-~(one*)", "test")); systemIndexDescriptors.add(SystemIndexDescriptorUtils.createUnmanaged(".pattern-test*", "test-1")); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 8158917f0818..9300aa992b68 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NoDeletionPolicy; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.SoftDeletesRetentionMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.IndexOrDocValuesQuery; @@ -44,7 +45,6 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; -import org.apache.lucene.store.MMapDirectory; import org.apache.lucene.tests.analysis.MockAnalyzer; import org.apache.lucene.tests.index.RandomIndexWriter; import org.apache.lucene.tests.store.MockDirectoryWrapper; @@ -172,10 +172,10 @@ public class LuceneTests extends ESTestCase { assertEquals(3, open.maxDoc()); IndexSearcher s = newSearcher(open); - assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", 
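The AbstractTermVectorsTestCase change at the top of this hunk moves from the removed per-document IndexReader#getTermVectors(int) to the Lucene 10 TermVectors reader, which is fetched once and reused. A minimal sketch, assuming an already-open reader and a known doc id; the helper name is hypothetical.

import java.io.IOException;

import org.apache.lucene.index.Fields;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.TermVectors;

// Term vectors are read through a TermVectors instance in Lucene 10.
final class TermVectorsAccess {

    static Fields termVectorsFor(IndexReader reader, int docId) throws IOException {
        TermVectors termVectors = reader.termVectors(); // fetch once, reuse per thread
        return termVectors.get(docId);                  // was: reader.getTermVectors(docId)
    }
}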
"3")), 1).totalHits.value, 1); - assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value, 0); + assertEquals(s.search(new TermQuery(new Term("id", "1")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "2")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "3")), 1).totalHits.value(), 1); + assertEquals(s.search(new TermQuery(new Term("id", "4")), 1).totalHits.value(), 0); for (String file : dir.listAll()) { assertFalse("unexpected file: " + file, file.equals("segments_3") || file.startsWith("_2")); @@ -403,11 +403,6 @@ public class LuceneTests extends ESTestCase { throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { @@ -464,18 +459,6 @@ public class LuceneTests extends ESTestCase { } } - /** - * Test that the "unmap hack" is detected as supported by lucene. - * This works around the following bug: https://bugs.openjdk.java.net/browse/JDK-4724038 - *
<p>
    - * While not guaranteed, current status is "Critical Internal API": http://openjdk.java.net/jeps/260 - * Additionally this checks we did not screw up the security logic around the hack. - */ - public void testMMapHackSupported() throws Exception { - // add assume's here if needed for certain platforms, but we should know if it does not work. - assertTrue("MMapDirectory does not support unmapping: " + MMapDirectory.UNMAP_NOT_SUPPORTED_REASON, MMapDirectory.UNMAP_SUPPORTED); - } - public void testWrapAllDocsLive() throws Exception { Directory dir = newDirectory(); IndexWriterConfig config = newIndexWriterConfig().setSoftDeletesField(Lucene.SOFT_DELETES_FIELD) @@ -508,8 +491,9 @@ public class LuceneTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); Set actualDocs = new HashSet<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } @@ -554,8 +538,9 @@ public class LuceneTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); List actualDocs = new ArrayList<>(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + StoredFields storedFields = reader.storedFields(); for (ScoreDoc scoreDoc : topDocs.scoreDocs) { - actualDocs.add(reader.document(scoreDoc.doc).get("id")); + actualDocs.add(storedFields.document(scoreDoc.doc).get("id")); } assertThat(actualDocs, equalTo(liveDocs)); } diff --git a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java index 8332ff87a9d5..918dcc1bcbd4 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/index/FreqTermsEnumTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -127,8 +128,9 @@ public class FreqTermsEnumTests extends ESTestCase { // now go over each doc, build the relevant references and filter reader = DirectoryReader.open(iw); List filterTerms = new ArrayList<>(); + StoredFields storedFields = reader.storedFields(); for (int docId = 0; docId < reader.maxDoc(); docId++) { - Document doc = reader.document(docId); + Document doc = storedFields.document(docId); addFreqs(doc, referenceAll); if (deletedIds.contains(doc.getField("id").stringValue()) == false) { addFreqs(doc, referenceNotDeleted); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java index 7a8d43ebbfd1..55ca666d8588 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/function/MinScoreScorerTests.java @@ -11,15 +11,11 @@ package org.elasticsearch.common.lucene.search.function; import 
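The LuceneTests and FreqTermsEnumTests hunks above replace per-document reader.document(docId) calls with the Lucene 10 StoredFields reader, fetched once and reused across documents. A minimal sketch, assuming an open IndexReader and an already-collected TopDocs; the "id" field name mirrors the tests above and the class name is hypothetical.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.StoredFields;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;

// Stored fields are read through a StoredFields instance instead of reader.document(docId).
final class StoredFieldsAccess {

    static List<String> collectIds(IndexReader reader, TopDocs topDocs) throws IOException {
        StoredFields storedFields = reader.storedFields(); // fetch once, reuse for all docs
        List<String> ids = new ArrayList<>();
        for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
            ids.add(storedFields.document(scoreDoc.doc).get("id")); // was: reader.document(scoreDoc.doc)
        }
        return ids;
    }
}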
com.carrotsearch.randomizedtesting.generators.RandomPicks; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConjunctionUtils; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Explanation; -import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.TwoPhaseIterator; -import org.apache.lucene.search.Weight; import org.apache.lucene.tests.search.AssertingScorer; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.ESTestCase; @@ -66,27 +62,8 @@ public class MinScoreScorerTests extends ESTestCase { }; } - private static Weight fakeWeight() { - return new Weight(new MatchAllDocsQuery()) { - @Override - public Explanation explain(LeafReaderContext context, int doc) throws IOException { - return null; - } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return null; - } - - @Override - public boolean isCacheable(LeafReaderContext ctx) { - return false; - } - }; - } - private static Scorer hideTwoPhaseIterator(Scorer in) { - return new Scorer(in.getWeight()) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return TwoPhaseIterator.asDocIdSetIterator(in.twoPhaseIterator()); @@ -111,7 +88,7 @@ public class MinScoreScorerTests extends ESTestCase { private static Scorer scorer(int maxDoc, final int[] docs, final float[] scores, final boolean twoPhase) { final DocIdSetIterator iterator = twoPhase ? DocIdSetIterator.all(maxDoc) : iterator(docs); - final Scorer scorer = new Scorer(fakeWeight()) { + final Scorer scorer = new Scorer() { int lastScoredDoc = -1; final float matchCost = (random().nextBoolean() ? 1000 : 0) + random().nextInt(2000); @@ -192,7 +169,7 @@ public class MinScoreScorerTests extends ESTestCase { } Scorer scorer = scorer(maxDoc, docs, scores, twoPhase); final float minScore = random().nextFloat(); - Scorer minScoreScorer = new MinScoreScorer(fakeWeight(), scorer, minScore); + Scorer minScoreScorer = new MinScoreScorer(scorer, minScore); int doc = -1; while (doc != DocIdSetIterator.NO_MORE_DOCS) { final int target; @@ -250,7 +227,7 @@ public class MinScoreScorerTests extends ESTestCase { final float minScore; if (randomBoolean()) { minScore = randomFloat(); - MinScoreScorer minScoreScorer = new MinScoreScorer(scorer.getWeight(), scorer, minScore); + MinScoreScorer minScoreScorer = new MinScoreScorer(scorer, minScore); scorers.add(minScoreScorer); } else { scorers.add(scorer); diff --git a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java index b1df24f4db2a..3894efd0b7d4 100644 --- a/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java +++ b/server/src/test/java/org/elasticsearch/common/lucene/search/morelikethis/XMoreLikeThisTests.java @@ -117,7 +117,7 @@ public class XMoreLikeThisTests extends ESTestCase { final double boost10 = ((BooleanQuery) mlt.like("text", new StringReader("lucene|10 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -125,7 +125,7 @@ public class XMoreLikeThisTests extends ESTestCase { final double boost1 = ((BooleanQuery) mlt.like("text", 
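The MinScoreScorerTests hunk above drops the fakeWeight() helper because Lucene 10 scorers no longer carry a Weight: Scorer has a no-argument constructor and getWeight() is gone. A minimal sketch of a standalone scorer built that way; the class name and the fixed score are illustrative only.

import java.io.IOException;

import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;

// A trivial scorer over all docs in [0, maxDoc) that returns a constant score;
// note the implicit no-arg super(), since Scorer no longer takes a Weight.
final class ConstantOverAllDocsScorer extends Scorer {

    private final DocIdSetIterator iterator;
    private final float score;

    ConstantOverAllDocsScorer(int maxDoc, float score) {
        this.iterator = DocIdSetIterator.all(maxDoc);
        this.score = score;
    }

    @Override
    public DocIdSetIterator iterator() {
        return iterator;
    }

    @Override
    public float score() throws IOException {
        return score;
    }

    @Override
    public float getMaxScore(int upTo) throws IOException {
        return score;
    }

    @Override
    public int docID() {
        return iterator.docID();
    }
}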
new StringReader("lucene|1 release|1"))).clauses() .stream() - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .map(BoostQuery.class::cast) .filter(x -> ((TermQuery) x.getQuery()).getTerm().text().equals("lucene")) .mapToDouble(BoostQuery::getBoost) @@ -178,7 +178,7 @@ public class XMoreLikeThisTests extends ESTestCase { expectedTerms[idx++] = new Term("text", text); } for (BooleanClause clause : clauses) { - Term term = ((TermQuery) clause.getQuery()).getTerm(); + Term term = ((TermQuery) clause.query()).getTerm(); assertTrue(Arrays.asList(expectedTerms).contains(term)); } diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 6e8eb47035d4..e0e05c84b564 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -84,12 +84,12 @@ public class SimpleLuceneTests extends ESTestCase { try (IndexReader reader = DirectoryReader.open(indexWriter)) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - Document doc = searcher.doc(topDocs.scoreDocs[0].doc); + Document doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); IndexableField f = doc.getField("test"); assertThat(f.numericValue(), equalTo(2)); topDocs = searcher.search(IntPoint.newExactQuery("test", 2), 1); - doc = searcher.doc(topDocs.scoreDocs[0].doc); + doc = searcher.storedFields().document(topDocs.scoreDocs[0].doc); f = doc.getField("test"); assertThat(f.stringValue(), equalTo("2")); } @@ -115,7 +115,7 @@ public class SimpleLuceneTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); final ArrayList fieldsOrder = new ArrayList<>(); - searcher.doc(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { + searcher.storedFields().document(topDocs.scoreDocs[0].doc, new StoredFieldVisitor() { @Override public Status needsField(FieldInfo fieldInfo) throws IOException { fieldsOrder.add(fieldInfo.name); diff --git a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java index 97a6faaa5c6f..01c4ac3c6fd6 100644 --- a/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/deps/lucene/VectorHighlighterTests.java @@ -54,7 +54,7 @@ public class VectorHighlighterTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -87,7 +87,7 @@ public class VectorHighlighterTests extends ESTestCase { IndexReader reader = searcher.getIndexReader(); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); @@ -143,7 +143,7 @@ public class VectorHighlighterTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - 
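The XMoreLikeThisTests hunks above switch BooleanClause#getQuery to the record-style accessor: in Lucene 10 BooleanClause is a record, so its parts are read with query() and occur(). A minimal sketch that walks the clauses of a small boolean query; the field and term values are illustrative only.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

// BooleanClause is a record in Lucene 10: use query() and occur() instead of the getters.
public class BooleanClauseAccessors {
    public static void main(String[] args) {
        BooleanQuery booleanQuery = new BooleanQuery.Builder()
            .add(new TermQuery(new Term("text", "lucene")), BooleanClause.Occur.MUST)
            .add(new TermQuery(new Term("text", "release")), BooleanClause.Occur.SHOULD)
            .build();
        for (BooleanClause clause : booleanQuery.clauses()) {
            Query query = clause.query();               // was: clause.getQuery()
            BooleanClause.Occur occur = clause.occur(); // was: clause.getOccur()
            System.out.println(occur + " " + query);
        }
    }
}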
assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( @@ -170,7 +170,7 @@ public class VectorHighlighterTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); FastVectorHighlighter highlighter = new FastVectorHighlighter(); String fragment = highlighter.getBestFragment( diff --git a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java index 022a6994496a..9419c63f9c48 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetadataStateFormatTests.java @@ -227,7 +227,7 @@ public class MetadataStateFormatTests extends ESTestCase { } long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - 8); // one long is the checksum... 8 bytes checksumAfterCorruption = input.getChecksum(); diff --git a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java index db656b1fc5a9..450d123f551c 100644 --- a/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/PersistedClusterStateServiceTests.java @@ -20,6 +20,7 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -1776,9 +1777,10 @@ public class PersistedClusterStateServiceTests extends ESTestCase { final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? 
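The MetadataStateFormatTests hunk above reflects another small Lucene 10 signature change: Directory#openChecksumInput no longer takes an IOContext. A minimal sketch that validates a file's codec footer with the new signature, assuming the file was written with a standard CodecUtil footer; the helper name is hypothetical.

import java.io.IOException;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.ChecksumIndexInput;
import org.apache.lucene.store.Directory;

// Directory#openChecksumInput(String) dropped its IOContext parameter in Lucene 10.
final class FooterChecksumCheck {

    static long verifyFooter(Directory directory, String fileName) throws IOException {
        try (ChecksumIndexInput input = directory.openChecksumInput(fileName)) { // was: openChecksumInput(fileName, IOContext.DEFAULT)
            input.seek(input.length() - CodecUtil.footerLength()); // skipped bytes are checksummed on the way
            return CodecUtil.checkFooter(input);                   // validates the footer and returns the checksum
        }
    }
}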
i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); document.add(new StringField(TYPE_FIELD_NAME, typeName, Field.Store.NO)); consumer.accept(document); } diff --git a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java index 7b264ac93511..532a2ff024e8 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexServiceTests.java @@ -290,7 +290,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, search.totalHits.value); + assertEquals(1, search.totalHits.value()); } }); assertFalse(refreshTask.isClosed()); @@ -304,7 +304,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // this one becomes visible due to the force refresh we are running on updateMetadata if the interval changes try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(2, search.totalHits.value); + assertEquals(2, search.totalHits.value()); } }); prepareIndex("test").setId("2").setSource("{\"foo\": \"bar\"}", XContentType.JSON).get(); @@ -312,7 +312,7 @@ public class IndexServiceTests extends ESSingleNodeTestCase { // this one becomes visible due to the scheduled refresh try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(3, search.totalHits.value); + assertEquals(3, search.totalHits.value()); } }); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 10b0b54d2d7e..9e4a19eb039f 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -52,7 +52,7 @@ public class CodecTests extends ESTestCase { assumeTrue("Only when zstd_stored_fields feature flag is enabled", CodecService.ZSTD_STORED_FIELDS_FEATURE_FLAG.isEnabled()); CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMapperCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Elasticsearch816Codec.class)); + assertThat(codecService.codec("default"), instanceOf(Elasticsearch900Codec.class)); } public void testDefault() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java index d43a1e09d71a..12a17f5c263a 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatTests.java @@ -115,7 +115,6 @@ public class ES87TSDBDocValuesFormatTests extends 
BaseDocValuesFormatTestCase { assertEquals(0, field.nextOrd()); BytesRef scratch = field.lookupOrd(0); assertEquals("value", scratch.utf8ToString()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, field.nextOrd()); } assertEquals(DocIdSetIterator.NO_MORE_DOCS, field.nextDoc()); for (int i = 0; i < NUM_DOCS; i++) { @@ -126,7 +125,6 @@ public class ES87TSDBDocValuesFormatTests extends BaseDocValuesFormatTestCase { BytesRef scratch = fieldN.lookupOrd(0); assertEquals("value" + i, scratch.utf8ToString()); assertEquals(DocIdSetIterator.NO_MORE_DOCS, fieldN.nextDoc()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, fieldN.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java new file mode 100644 index 000000000000..099b59808ef4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/tsdb/ES87TSDBDocValuesFormatVariableSkipIntervalTests.java @@ -0,0 +1,196 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package org.elasticsearch.index.codec.tsdb; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.apache.lucene.tests.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; + +/** Tests ES87TSDBDocValuesFormat with custom skipper interval size. 
*/ +public class ES87TSDBDocValuesFormatVariableSkipIntervalTests extends BaseDocValuesFormatTestCase { + + @Override + protected Codec getCodec() { + // small interval size to test with many intervals + return TestUtil.alwaysDocValuesFormat(new ES87TSDBDocValuesFormat(random().nextInt(4, 16))); + } + + public void testSkipIndexIntervalSize() { + IllegalArgumentException ex = expectThrows( + IllegalArgumentException.class, + () -> new ES87TSDBDocValuesFormat(random().nextInt(Integer.MIN_VALUE, 2)) + ); + assertTrue(ex.getMessage().contains("skipIndexIntervalSize must be > 1")); + } + + public void testSkipperAllEqualValue() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int numDocs = atLeast(100); + for (int i = 0; i < numDocs; i++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + skipper.advance(0); + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs, skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on different value + public void testSkipperFewValuesSorted() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + boolean reverse = random().nextBoolean(); + config.setIndexSort(new Sort(new SortField("dv", SortField.Type.LONG, reverse))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int intervals = random().nextInt(2, 10); + final int[] numDocs = new int[intervals]; + for (int i = 0; i < intervals; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(NumericDocValuesField.indexedField("dv", i)); + writer.addDocument(doc); + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + if (reverse) { + for (int i = intervals - 1; i >= 0; i--) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } else { + for (int i = 0; i < intervals; i++) { + assertEquals(i, skipper.minValue(0)); + assertEquals(i, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on empty doc values + public void testSkipperAllEqualValueWithGaps() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); 
RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + numDocs[i] = random().nextInt(10) + 16; + for (int j = 0; j < numDocs[i]; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + // add doc with empty "dv" + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + writer.addDocument(doc); + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } + + // break on multi-values + public void testSkipperAllEqualValueWithMultiValues() throws IOException { + final IndexWriterConfig config = new IndexWriterConfig().setCodec(getCodec()); + config.setIndexSort(new Sort(new SortField("sort", SortField.Type.LONG, false))); + try (Directory directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, config)) { + final int gaps = random().nextInt(2, 10); + final int[] numDocs = new int[gaps]; + long totaldocs = 0; + for (int i = 0; i < gaps; i++) { + int docs = random().nextInt(10) + 16; + numDocs[i] += docs; + for (int j = 0; j < docs; j++) { + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + } + if (i != gaps - 1) { + // add doc with mutivalues + final Document doc = new Document(); + doc.add(new NumericDocValuesField("sort", totaldocs++)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + doc.add(SortedNumericDocValuesField.indexedField("dv", 0L)); + writer.addDocument(doc); + numDocs[i + 1] = 1; + } + } + writer.forceMerge(1); + try (IndexReader reader = writer.getReader()) { + assertEquals(1, reader.leaves().size()); + final DocValuesSkipper skipper = reader.leaves().get(0).reader().getDocValuesSkipper("dv"); + assertNotNull(skipper); + assertEquals(Arrays.stream(numDocs).sum(), skipper.docCount()); + skipper.advance(0); + for (int i = 0; i < gaps; i++) { + assertEquals(0L, skipper.minValue(0)); + assertEquals(0L, skipper.maxValue(0)); + assertEquals(numDocs[i], skipper.docCount(0)); + skipper.advance(skipper.maxDocID(0) + 1); + } + assertEquals(DocIdSetIterator.NO_MORE_DOCS, skipper.minDocID(0)); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java index 8f0a306e1eb3..86b60d9984de 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/BaseKnnBitVectorsFormatTestCase.java @@ -19,6 +19,7 @@ import 
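The new ES87TSDBDocValuesFormatVariableSkipIntervalTests above exercises the DocValuesSkipper API introduced in Lucene 10. A minimal sketch of consuming a skipper outside the test, assuming a leaf reader whose field was indexed with a skip index (for example via NumericDocValuesField.indexedField); the walker class name is hypothetical.

import java.io.IOException;

import org.apache.lucene.index.DocValuesSkipper;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.search.DocIdSetIterator;

// Walks the level-0 skip intervals of a doc-values skip index and prints
// the per-interval doc range, value range, and doc count.
final class DocValuesSkipperWalker {

    static void walk(LeafReader leafReader, String field) throws IOException {
        DocValuesSkipper skipper = leafReader.getDocValuesSkipper(field);
        if (skipper == null) {
            return; // field has no skip index
        }
        skipper.advance(0);
        while (skipper.minDocID(0) != DocIdSetIterator.NO_MORE_DOCS) {
            System.out.printf(
                "docs [%d..%d]: values [%d..%d], %d docs%n",
                skipper.minDocID(0),
                skipper.maxDocID(0),
                skipper.minValue(0),
                skipper.maxValue(0),
                skipper.docCount(0)
            );
            skipper.advance(skipper.maxDocID(0) + 1); // move to the next interval
        }
    }
}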
org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; @@ -110,8 +111,9 @@ abstract class BaseKnnBitVectorsFormatTestCase extends BaseIndexFileFormatTestCa totalSize += vectorValues.size(); StoredFields storedFields = ctx.reader().storedFields(); int docId; - while ((docId = vectorValues.nextDoc()) != NO_MORE_DOCS) { - byte[] v = vectorValues.vectorValue(); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + while ((docId = iterator.nextDoc()) != NO_MORE_DOCS) { + byte[] v = vectorValues.vectorValue(iterator.index()); assertEquals(dimension, v.length); String idString = storedFields.document(docId).getField("id").stringValue(); int id = Integer.parseInt(idString); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java index aa50bc26c444..57cca6eea86e 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813FlatVectorFormatTests.java @@ -11,7 +11,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase { @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java index 8cb927036588..9069b094ee48 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES813Int8FlatVectorFormatTests.java @@ -11,7 +11,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.tests.index.BaseKnnVectorsFormatTestCase; import org.elasticsearch.common.logging.LogConfigurator; @@ -24,7 +24,7 @@ public class ES813Int8FlatVectorFormatTests extends BaseKnnVectorsFormatTestCase @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES813Int8FlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java index 
cee60efb5732..549a14ca6c31 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES814HnswScalarQuantizedVectorsFormatTests.java @@ -11,7 +11,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.KnnFloatVectorField; @@ -19,6 +19,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.VectorSimilarityFunction; @@ -41,7 +42,7 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES814HnswScalarQuantizedVectorsFormat(); @@ -68,9 +69,10 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); - assertEquals(0, vectorValues.vectorValue()[0], 0); - assertEquals(NO_MORE_DOCS, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertEquals(0, vectorValues.vectorValue(iterator.index())[0], 0); + assertEquals(NO_MORE_DOCS, iterator.nextDoc()); } } } @@ -110,12 +112,13 @@ public class ES814HnswScalarQuantizedVectorsFormatTests extends BaseKnnVectorsFo try (IndexReader reader = DirectoryReader.open(w2)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); - assertEquals(0, vectorValues.nextDoc()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); // The merge order is randomized, we might get 1 first, or 2 - float value = vectorValues.vectorValue()[0]; + float value = vectorValues.vectorValue(iterator.index())[0]; assertTrue(value == 1 || value == 2); - assertEquals(1, vectorValues.nextDoc()); - value += vectorValues.vectorValue()[0]; + assertEquals(1, iterator.nextDoc()); + value += vectorValues.vectorValue(iterator.index())[0]; assertEquals(3f, value, 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java index 90d2584feb3f..034d428b2520 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815BitFlatVectorFormatTests.java @@ -11,7 +11,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; 
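The vector-format test hunks above and below adopt the Lucene 10 KnnVectorValues access pattern: docs are iterated through a DocIndexIterator and vectorValue(...) takes the iterator's ordinal instead of being positional. A minimal sketch, assuming a leaf reader with a float vector field; the field handling and class name are illustrative only.

import java.io.IOException;

import org.apache.lucene.index.FloatVectorValues;
import org.apache.lucene.index.KnnVectorValues;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.search.DocIdSetIterator;

// Iterate float vectors via a DocIndexIterator and read each vector by ordinal,
// instead of calling nextDoc()/vectorValue() directly on the values object.
final class FloatVectorScan {

    static int countVectors(LeafReader leafReader, String field) throws IOException {
        FloatVectorValues vectorValues = leafReader.getFloatVectorValues(field);
        if (vectorValues == null) {
            return 0; // field has no vectors in this segment
        }
        int count = 0;
        KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator();
        for (int docId = iterator.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
            float[] vector = vectorValues.vectorValue(iterator.index()); // was: vectorValues.vectorValue()
            if (vector.length > 0) {
                count++;
            }
        }
        return count;
    }
}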
+import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815BitFlatVectorFormatTests extends BaseKnnBitVectorsFormatTestCa @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815BitFlatVectorFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java index add90ea271fa..4af6a405c770 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES815HnswBitVectorsFormatTests.java @@ -11,7 +11,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.index.VectorSimilarityFunction; import org.junit.Before; @@ -19,7 +19,7 @@ public class ES815HnswBitVectorsFormatTests extends BaseKnnBitVectorsFormatTestC @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES815HnswBitVectorsFormat(); diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java index 04d4ef2079b9..cef5e5358f3d 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryFlatVectorsScorerTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.index.VectorSimilarityFunction; +import org.apache.lucene.search.VectorScorer; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.VectorUtil; import org.elasticsearch.common.logging.LogConfigurator; @@ -61,7 +62,7 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) throws IOException { return random().nextFloat(0f, 1000f); @@ -99,7 +100,7 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { } @Override - public RandomAccessBinarizedByteVectorValues copy() throws IOException { + public BinarizedByteVectorValues copy() throws IOException { return null; } @@ -115,6 +116,16 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -209,7 +220,7 @@ public class ES816BinaryFlatVectorsScorerTests extends 
LuceneTestCase { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, 0f, 0f) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 355.78073f; @@ -375,7 +386,7 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -389,6 +400,16 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; @@ -806,7 +827,7 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { new BinaryQuantizer.QueryFactors(quantizedSum, distanceToCentroid, vl, width, normVmC, vDotC) ); - RandomAccessBinarizedByteVectorValues targetVectors = new RandomAccessBinarizedByteVectorValues() { + BinarizedByteVectorValues targetVectors = new BinarizedByteVectorValues() { @Override public float getCentroidDistance(int vectorOrd) { return 0f; @@ -1617,7 +1638,7 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { } @Override - public RandomAccessBinarizedByteVectorValues copy() { + public BinarizedByteVectorValues copy() { return null; } @@ -1727,6 +1748,16 @@ public class ES816BinaryFlatVectorsScorerTests extends LuceneTestCase { return 1; } + @Override + public VectorScorer scorer(float[] query) throws IOException { + return null; + } + + @Override + public float[] getCorrectiveTerms(int vectorOrd) throws IOException { + return new float[0]; + } + @Override public int dimension() { return dimensions; diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java index 0892436891ff..42f2fbb383ac 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816BinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.index.DirectoryReader; @@ -30,6 +30,7 @@ import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.IndexSearcher; @@ -58,7 +59,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public 
KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816BinaryQuantizedVectorsFormat(); @@ -90,8 +91,8 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat float[] queryVector = randomVector(dims); Query q = new KnnFloatVectorQuery(fieldName, queryVector, k); TopDocs collectedDocs = searcher.search(q, k); - assertEquals(k, collectedDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation); + assertEquals(k, collectedDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collectedDocs.totalHits.relation()); } } } @@ -148,7 +149,7 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues(fieldName); assertEquals(vectorValues.size(), numVectors); - OffHeapBinarizedVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) + BinarizedByteVectorValues qvectorValues = ((ES816BinaryQuantizedVectorsReader.BinarizedVectorValues) vectorValues) .getQuantizedVectorValues(); float[] centroid = qvectorValues.getCentroid(); assertEquals(centroid.length, dims); @@ -159,13 +160,18 @@ public class ES816BinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFormat if (similarityFunction == VectorSimilarityFunction.COSINE) { vectorValues = new ES816BinaryQuantizedVectorsWriter.NormalizedFloatVectorValues(vectorValues); } + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - float[] corrections = quantizer.quantizeForIndex(vectorValues.vectorValue(), expectedVector, centroid); - assertArrayEquals(expectedVector, qvectorValues.vectorValue()); - assertEquals(corrections.length, qvectorValues.getCorrectiveTerms().length); + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + float[] corrections = quantizer.quantizeForIndex( + vectorValues.vectorValue(docIndexIterator.index()), + expectedVector, + centroid + ); + assertArrayEquals(expectedVector, qvectorValues.vectorValue(docIndexIterator.index())); + assertEquals(corrections.length, qvectorValues.getCorrectiveTerms(docIndexIterator.index()).length); for (int i = 0; i < corrections.length; i++) { - assertEquals(corrections[i], qvectorValues.getCorrectiveTerms()[i], 0.00001f); + assertEquals(corrections[i], qvectorValues.getCorrectiveTerms(docIndexIterator.index())[i], 0.00001f); } } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java index f607de57e1fd..ca96e093b7b2 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/vectors/ES816HnswBinaryQuantizedVectorsFormatTests.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.codec.vectors; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FilterCodec; import org.apache.lucene.codecs.KnnVectorsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsReader; import org.apache.lucene.document.Document; import org.apache.lucene.document.KnnFloatVectorField; @@ -30,6 +30,7 @@ import org.apache.lucene.index.DirectoryReader; 
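The assertion changes in this file switch from field access to method calls because TopDocs.totalHits now exposes its value and relation through accessors in Lucene 10. A minimal sketch of the accessor change, with an illustrative searcher and query:

    // Sketch only: searcher, query and result size are illustrative.
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
    long hitCount = topDocs.totalHits.value();                  // previously: topDocs.totalHits.value
    TotalHits.Relation relation = topDocs.totalHits.relation(); // previously: topDocs.totalHits.relation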
import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.VectorSimilarityFunction; import org.apache.lucene.search.TopDocs; @@ -55,7 +56,7 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo @Override protected Codec getCodec() { - return new Lucene912Codec() { + return new Lucene100Codec() { @Override public KnnVectorsFormat getKnnVectorsFormatForField(String field) { return new ES816HnswBinaryQuantizedVectorsFormat(); @@ -91,12 +92,13 @@ public class ES816HnswBinaryQuantizedVectorsFormatTests extends BaseKnnVectorsFo try (IndexReader reader = DirectoryReader.open(w)) { LeafReader r = getOnlyLeafReader(reader); FloatVectorValues vectorValues = r.getFloatVectorValues("f"); + KnnVectorValues.DocIndexIterator docIndexIterator = vectorValues.iterator(); assert (vectorValues.size() == 1); - while (vectorValues.nextDoc() != NO_MORE_DOCS) { - assertArrayEquals(vector, vectorValues.vectorValue(), 0.00001f); + while (docIndexIterator.nextDoc() != NO_MORE_DOCS) { + assertArrayEquals(vector, vectorValues.vectorValue(docIndexIterator.index()), 0.00001f); } TopDocs td = r.searchNearestVectors("f", randomVector(vector.length), 1, null, Integer.MAX_VALUE); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); assertTrue(td.scoreDocs[0].score >= 0); } } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java index c3fea6c7a189..437ba1cecc11 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/StoredFieldCodecDuelTests.java @@ -10,7 +10,7 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; @@ -35,13 +35,13 @@ public class StoredFieldCodecDuelTests extends ESTestCase { private static final String DOUBLE_FIELD = "double_field_5"; public void testDuelBestSpeed() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } public void testDuelBestCompression() throws IOException { - var baseline = new LegacyPerFieldMapperCodec(Lucene912Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); + var baseline = new LegacyPerFieldMapperCodec(Lucene100Codec.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); var contender = new PerFieldMapperCodec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION, null, BigArrays.NON_RECYCLING_INSTANCE); doTestDuel(baseline, contender); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java 
b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java index 71c7464657e7..77a7585e3b51 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestCompressionStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestCompressionStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_COMPRESSION); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java index 02a1b1069790..3d6cfea70d12 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/zstd/Zstd814BestSpeedStoredFieldsFormatTests.java @@ -11,11 +11,11 @@ package org.elasticsearch.index.codec.zstd; import org.apache.lucene.codecs.Codec; import org.apache.lucene.tests.index.BaseStoredFieldsFormatTestCase; -import org.elasticsearch.index.codec.Elasticsearch816Codec; +import org.elasticsearch.index.codec.Elasticsearch900Codec; public class Zstd814BestSpeedStoredFieldsFormatTests extends BaseStoredFieldsFormatTestCase { - private final Codec codec = new Elasticsearch816Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); + private final Codec codec = new Elasticsearch900Codec(Zstd814StoredFieldsFormat.Mode.BEST_SPEED); @Override protected Codec getCodec() { diff --git a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java index 6565a11a860e..6d205a22433b 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/CompletionStatsCacheTests.java @@ -9,7 +9,7 @@ package org.elasticsearch.index.engine; import org.apache.lucene.codecs.PostingsFormat; -import org.apache.lucene.codecs.lucene912.Lucene912Codec; +import org.apache.lucene.codecs.lucene100.Lucene100Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -45,7 +45,7 @@ public class CompletionStatsCacheTests extends ESTestCase { public void testCompletionStatsCache() throws IOException, InterruptedException { final IndexWriterConfig indexWriterConfig = newIndexWriterConfig(); final PostingsFormat postingsFormat = new Completion912PostingsFormat(); - indexWriterConfig.setCodec(new Lucene912Codec() { + indexWriterConfig.setCodec(new Lucene100Codec() { @Override public PostingsFormat getPostingsFormatForField(String field) { return postingsFormat; // all fields are suggest fields diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java 
b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index c8ca3d17de79..21aefd893de7 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -266,7 +266,7 @@ public class InternalEngineTests extends EngineTestCase { try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("test", luceneDoc.get("value")); } @@ -279,7 +279,7 @@ public class InternalEngineTests extends EngineTestCase { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { assertEquals(1, searcher.getIndexReader().numDocs()); TopDocs search = searcher.search(new MatchAllDocsQuery(), 1); - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[0].doc); assertEquals("updated", luceneDoc.get("value")); } @@ -640,7 +640,7 @@ public class InternalEngineTests extends EngineTestCase { recoverFromTranslog(recoveringEngine, translogHandler, Long.MAX_VALUE); recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 
0 : 1)); } } @@ -747,7 +747,7 @@ public class InternalEngineTests extends EngineTestCase { recoveringEngine.refresh("test"); try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs); - assertEquals(docs, topDocs.totalHits.value); + assertEquals(docs, topDocs.totalHits.value()); } } finally { IOUtils.close(initialEngine, recoveringEngine, store); @@ -2010,7 +2010,7 @@ public class InternalEngineTests extends EngineTestCase { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValueDoc1)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2019,7 +2019,7 @@ public class InternalEngineTests extends EngineTestCase { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValueDoc2)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2249,7 +2249,7 @@ public class InternalEngineTests extends EngineTestCase { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { Integer totalHits = searcher.search( new TermQuery(new Term("value", lastFieldValue)), - new TotalHitCountCollectorManager() + new TotalHitCountCollectorManager(searcher.getSlices()) ); assertThat(totalHits, equalTo(1)); } @@ -2275,7 +2275,10 @@ public class InternalEngineTests extends EngineTestCase { assertVisibleCount(engine, docDeleted ? 0 : 1); if (docDeleted == false) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2361,7 +2364,10 @@ public class InternalEngineTests extends EngineTestCase { if (docDeleted == false) { logger.info("searching for [{}]", lastFieldValue); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2378,7 +2384,7 @@ public class InternalEngineTests extends EngineTestCase { final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine); final long currentSeqNo = getSequenceID(replicaEngine, new Engine.Get(false, false, Term.toString(lastReplicaOp.uid()))).v1(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); if (totalHits > 0) { // last op wasn't delete assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary)); @@ -2402,7 +2408,10 @@ public class InternalEngineTests extends EngineTestCase { assertVisibleCount(engine, lastFieldValue == null ? 
0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } @@ -2434,7 +2443,7 @@ public class InternalEngineTests extends EngineTestCase { Engine.Get engineGet = new Engine.Get(true, false, doc.id()); try (Engine.GetResult get = engine.get(engineGet, mappingLookup, documentParser, randomSearcherWrapper())) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString())); String removed = op % 3 == 0 && values.size() > 0 ? values.remove(0) : null; String added = "v_" + idGenerator.incrementAndGet(); @@ -2480,7 +2489,7 @@ public class InternalEngineTests extends EngineTestCase { Engine.GetResult get = engine.get(new Engine.Get(true, false, doc.id()), mappingLookup, documentParser, randomSearcherWrapper()) ) { FieldsVisitor visitor = new FieldsVisitor(true); - get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor); + get.docIdAndVersion().reader.storedFields().document(get.docIdAndVersion().docId, visitor); List values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString())); assertThat(currentValues, equalTo(new HashSet<>(values))); } @@ -3424,7 +3433,7 @@ public class InternalEngineTests extends EngineTestCase { engine.skipTranslogRecovery(); try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10)); - assertThat(topDocs.totalHits.value, equalTo(0L)); + assertThat(topDocs.totalHits.value(), equalTo(0L)); } } } @@ -3514,7 +3523,7 @@ public class InternalEngineTests extends EngineTestCase { assertThat(result.getVersion(), equalTo(2L)); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } engine.close(); @@ -3523,7 +3532,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("warm_up"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1); - assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L)); + assertThat(topDocs.totalHits.value(), equalTo(numDocs + 1L)); } assertEquals(flush ? 
1 : 2, translogHandler.appliedOperations()); engine.delete(new Engine.Delete(Integer.toString(randomId), newUid(doc), primaryTerm.get())); @@ -3534,7 +3543,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs); - assertThat(topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), equalTo((long) numDocs)); } } @@ -3890,7 +3899,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } operation = appendOnlyPrimary(doc, false, 1, create); retry = appendOnlyPrimary(doc, true, 1, create); @@ -3925,7 +3934,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -3983,7 +3992,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } } @@ -4007,7 +4016,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } final boolean create = randomBoolean(); @@ -4047,7 +4056,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4092,12 +4101,12 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) { List ops = readAllOperationsInLucene(engine); @@ -4172,7 +4181,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } index = new Engine.Index( @@ -4194,7 +4203,7 @@ public class InternalEngineTests extends EngineTestCase { replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new 
MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -4264,7 +4273,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } Engine.Index secondIndexRequestReplica = new Engine.Index( @@ -4285,7 +4294,7 @@ public class InternalEngineTests extends EngineTestCase { replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); } } @@ -5678,7 +5687,7 @@ public class InternalEngineTests extends EngineTestCase { try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs()); for (int i = 0; i < search.scoreDocs.length; i++) { - org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc); + org.apache.lucene.document.Document luceneDoc = searcher.storedFields().document(search.scoreDocs[i].doc); assertEquals("updated", luceneDoc.get("value")); } int totalNumDocs = numDocs - numDeletes.get(); @@ -6666,7 +6675,7 @@ public class InternalEngineTests extends EngineTestCase { engine.refresh("test"); try (Engine.Searcher searcher = engine.acquireSearcher("test")) { LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader()); - assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().getCreatedVersionMajor()); + assertEquals(createdVersion.luceneVersion().major, leafReader.getMetaData().createdVersionMajor()); } } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index a3a21fc32e54..b6be13b9f251 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -13,7 +13,6 @@ import org.apache.lucene.tests.util.RamUsageTester; import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.Constants; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Tuple; @@ -71,22 +70,16 @@ public class LiveVersionMapTests extends ESTestCase { } actualRamBytesUsed = RamUsageTester.ramUsed(map); estimatedRamBytesUsed = map.ramBytesUsed(); - long tolerance; - if (Constants.JRE_IS_MINIMUM_JAVA9) { - // With Java 9, RamUsageTester computes the memory usage of maps as - // the memory usage of an array that would contain exactly all keys - // and values. This is an under-estimation of the actual memory - // usage since it ignores the impact of the load factor and of the - // linked list/tree that is used to resolve collisions. So we use a - // bigger tolerance. - // less than 50% off - tolerance = actualRamBytesUsed / 2; - } else { - // Java 8 is more accurate by doing reflection into the actual JDK classes - // so we give it a lower error bound. 
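Two migrations recur throughout the InternalEngineTests hunks above: TotalHitCountCollectorManager now takes the searcher's slices up front, and stored documents are read through the StoredFields API instead of IndexSearcher#doc / IndexReader#document. A combined sketch, assuming an illustrative searcher and query:

    // Sketch only: searcher and query are illustrative.
    // Lucene 10: the collector manager is constructed with the searcher's slices.
    Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices()));

    // Lucene 10: stored fields are read via a StoredFields instance rather than searcher.doc(...).
    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1);
    StoredFields storedFields = searcher.storedFields();
    Document doc = storedFields.document(topDocs.scoreDocs[0].doc);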
- // less than 25% off - tolerance = actualRamBytesUsed / 4; - } + + // Since Java 9, RamUsageTester computes the memory usage of maps as + // the memory usage of an array that would contain exactly all keys + // and values. This is an under-estimation of the actual memory + // usage since it ignores the impact of the load factor and of the + // linked list/tree that is used to resolve collisions. So we use a + // bigger tolerance. + // less than 50% off + long tolerance = actualRamBytesUsed / 2; + assertEquals(actualRamBytesUsed, estimatedRamBytesUsed, tolerance); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java index e7e668415cdd..c0e365909429 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/RecoverySourcePruneMergePolicyTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.MatchAllDocsQuery; @@ -69,8 +70,9 @@ public class RecoverySourcePruneMergePolicyTests extends ESTestCase { writer.forceMerge(1); writer.commit(); try (DirectoryReader reader = DirectoryReader.open(writer)) { + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); if (pruneIdField) { assertEquals(1, document.getFields().size()); assertEquals("source", document.getFields().get(0).name()); @@ -151,8 +153,9 @@ public class RecoverySourcePruneMergePolicyTests extends ESTestCase { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("even")); @@ -192,8 +195,9 @@ public class RecoverySourcePruneMergePolicyTests extends ESTestCase { assertEquals(1, reader.leaves().size()); NumericDocValues extra_source = reader.leaves().get(0).reader().getNumericDocValues("extra_source"); assertNotNull(extra_source); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < reader.maxDoc(); i++) { - Document document = reader.document(i); + Document document = storedFields.document(i); Set collect = document.getFields().stream().map(IndexableField::name).collect(Collectors.toSet()); assertTrue(collect.contains("source")); assertTrue(collect.contains("extra_source")); diff --git a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java index a6e56c413702..49036324e722 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/SegmentTests.java @@ -80,7 +80,7 @@ public class 
SegmentTests extends ESTestCase { segment.sizeInBytes = randomNonNegativeLong(); segment.docCount = randomIntBetween(1, Integer.MAX_VALUE); segment.delDocCount = randomIntBetween(0, segment.docCount); - segment.version = Version.LUCENE_8_0_0; + segment.version = Version.LUCENE_9_0_0; segment.compound = randomBoolean(); segment.mergeId = randomAlphaOfLengthBetween(1, 10); segment.segmentSort = randomIndexSort(); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index b7793a644f8b..9d0a9cdeb196 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -113,7 +113,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes TopFieldDocs topDocs; SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(toString(((FieldDoc) topDocs.scoreDocs[0]).fields[0]), equalTo(one())); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); @@ -123,7 +123,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs[0].doc, equalTo(2)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(1)); @@ -193,14 +193,14 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(1)); assertThat(topDocs.scoreDocs[1].doc, equalTo(0)); assertThat(topDocs.scoreDocs[2].doc, equalTo(2)); sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(3L)); + assertThat(topDocs.totalHits.value(), equalTo(3L)); assertThat(topDocs.scoreDocs.length, equalTo(3)); assertThat(topDocs.scoreDocs[0].doc, equalTo(0)); assertThat(topDocs.scoreDocs[1].doc, equalTo(2)); @@ -258,7 +258,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes IndexSearcher searcher = newIndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(7)); 
assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("!08")); @@ -279,7 +279,7 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes sortField = indexFieldData.sortField(null, MultiValueMode.MAX, null, true); topDocs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(sortField)); - assertThat(topDocs.totalHits.value, equalTo(8L)); + assertThat(topDocs.totalHits.value(), equalTo(8L)); assertThat(topDocs.scoreDocs.length, equalTo(8)); assertThat(topDocs.scoreDocs[0].doc, equalTo(6)); assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[0]).fields[0]).utf8ToString(), equalTo("10")); diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index ac77f147a7ce..48d6cabefe34 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -262,10 +262,10 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI randomBoolean() ? numDocs : randomIntBetween(10, numDocs), new Sort(sortField) ); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); BytesRef previousValue = reverse ? UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); final BytesRef value = new BytesRef(docValue == null ? missingValue : docValue); if (reverse) { assertTrue(previousValue.compareTo(value) >= 0); @@ -321,10 +321,10 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI new Sort(sortField) ); - assertThat(topDocs.totalHits.value, lessThanOrEqualTo((long) numDocs)); + assertThat(topDocs.totalHits.value(), lessThanOrEqualTo((long) numDocs)); BytesRef previousValue = first ? null : reverse ? 
UnicodeUtil.BIG_TERM : new BytesRef(); for (int i = 0; i < topDocs.scoreDocs.length; ++i) { - final String docValue = searcher.doc(topDocs.scoreDocs[i].doc).get("value"); + final String docValue = searcher.storedFields().document(topDocs.scoreDocs[i].doc).get("value"); if (first && docValue == null) { assertNull(previousValue); } else if (first == false && docValue != null) { @@ -414,7 +414,7 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI assertTrue("expected " + docID + " to be a parent", parents.get(docID)); BytesRef cmpValue = null; for (int child = parents.prevSetBit(docID - 1) + 1; child < docID; ++child) { - String[] sVals = searcher.doc(child).getValues("text"); + String[] sVals = searcher.storedFields().document(child).getValues("text"); final BytesRef[] vals; if (sVals.length == 0) { vals = new BytesRef[0]; @@ -498,15 +498,11 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(1)); assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Second segment leaf = topLevelReader.leaves().get(1); @@ -522,8 +518,6 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); @@ -534,8 +528,6 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); assertFalse(values.advanceExact(2)); assertTrue(values.advanceExact(3)); ord = values.nextOrd(); @@ -547,8 +539,6 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Third segment leaf = topLevelReader.leaves().get(2); @@ -564,8 +554,6 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); - ord = values.nextOrd(); - assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); } public void testTermsEnum() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java index cb6732ce8bb7..aa23dc6da19d 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java @@ -108,7 +108,6 @@ public class MultiOrdinalsTests extends ESTestCase { for (Long ord : docOrds) { 
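The removed assertions in these field-data tests reflect that Lucene 10 drops the SortedSetDocValues.NO_MORE_ORDS sentinel; callers are expected to invoke nextOrd() exactly docValueCount() times per document, as the updated flattened-field test further below also shows. A minimal sketch with an illustrative doc-values instance and document id:

    // Sketch only: docValues and docId are illustrative.
    if (docValues.advanceExact(docId)) {
        for (int i = 0; i < docValues.docValueCount(); i++) {
            long ord = docValues.nextOrd();        // no NO_MORE_ORDS sentinel in Lucene 10
            BytesRef term = docValues.lookupOrd(ord);
            // use term here
        }
    }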
assertThat(docs.nextOrd(), equalTo(ord)); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, docs.nextOrd()); } for (int i = docId + 1; i < ordAndId.id; i++) { assertFalse(singleOrds.advanceExact(i)); @@ -257,7 +256,6 @@ public class MultiOrdinalsTests extends ESTestCase { for (long ord : ords) { assertThat(docs.nextOrd(), equalTo(ord)); } - assertThat(docs.nextOrd(), equalTo(SortedSetDocValues.NO_MORE_ORDS)); } } } diff --git a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java index 86ddbbbc9759..81be71aec23c 100644 --- a/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java +++ b/server/src/test/java/org/elasticsearch/index/fieldstats/FieldStatsProviderRefreshTests.java @@ -47,7 +47,7 @@ public class FieldStatsProviderRefreshTests extends ESSingleNodeTestCase { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r1 -> assertThat(r1.getHits().getTotalHits().value, equalTo(3L)) + r1 -> assertThat(r1.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(0, 1); @@ -57,7 +57,7 @@ public class FieldStatsProviderRefreshTests extends ESSingleNodeTestCase { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r2 -> assertThat(r2.getHits().getTotalHits().value, equalTo(3L)) + r2 -> assertThat(r2.getHits().getTotalHits().value(), equalTo(3L)) ); assertRequestCacheStats(1, 1); @@ -72,7 +72,7 @@ public class FieldStatsProviderRefreshTests extends ESSingleNodeTestCase { .setSearchType(SearchType.QUERY_THEN_FETCH) .setSize(0) .setQuery(QueryBuilders.rangeQuery("s").gte("a").lte("g")), - r3 -> assertThat(r3.getHits().getTotalHits().value, equalTo(5L)) + r3 -> assertThat(r3.getHits().getTotalHits().value(), equalTo(5L)) ); assertRequestCacheStats(1, 2); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index ddba993fd41c..4aa983a78b07 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -121,8 +122,15 @@ public class BooleanScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeT BooleanScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [false]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [true]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + 
storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [false]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [true]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java index 2c78f5f7fee2..f55d213bea58 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleIndexingDocTests.java @@ -38,25 +38,25 @@ public class DoubleIndexingDocTests extends MapperServiceTestCase { }, reader -> { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(mapperService.fieldType("field1").termQuery("value1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field2").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field3").termQuery("1.1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field4").termQuery("2010-01-01", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("1", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("2", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); topDocs = searcher.search(mapperService.fieldType("field5").termQuery("3", context), 10); - assertThat(topDocs.totalHits.value, equalTo(2L)); + assertThat(topDocs.totalHits.value(), equalTo(2L)); }); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java index 48879cdd0d77..140137015d98 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DoubleScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -120,9 +121,19 @@ public class DoubleScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTe DoubleScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1.1]}")); - 
assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2.1]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4.2]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2.1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4.2]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 4284bc00cfc1..0182da8ade48 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -78,5 +78,4 @@ public class FieldNamesFieldMapperTests extends MetadataMapperTestCase { ex.getMessage() ); } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java index 72055940b897..4cc447d97291 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IdFieldTypeTests.java @@ -19,6 +19,8 @@ import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.test.ESTestCase; import org.mockito.Mockito; +import java.util.List; + public class IdFieldTypeTests extends ESTestCase { public void testRangeQuery() { @@ -49,7 +51,7 @@ public class IdFieldTypeTests extends ESTestCase { Mockito.when(context.indexVersionCreated()).thenReturn(IndexVersion.current()); MappedFieldType ft = new ProvidedIdFieldMapper.IdFieldType(() -> false); Query query = ft.termQuery("id", context); - assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query); + assertEquals(new TermInSetQuery("_id", List.of(Uid.encodeId("id"))), query); } public void testIsAggregatable() { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java index 4170adf0a850..8f209fb78fc6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpPrefixAutomatonUtilTests.java @@ -230,13 +230,8 @@ public class IpPrefixAutomatonUtilTests extends ESTestCase { } private static CompiledAutomaton compileAutomaton(Automaton automaton) { - CompiledAutomaton compiledAutomaton = new CompiledAutomaton( - automaton, - null, - false, - Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, - true - ); + automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + CompiledAutomaton compiledAutomaton = new CompiledAutomaton(automaton, false, false, true); return compiledAutomaton; } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java index acbfe8b8f9b3..281d2993fa29 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/IpScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -125,16 +126,17 @@ public class IpScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase { BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); + StoredFields storedFields = reader.storedFields(); assertThat( - reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.1\"]}") ); assertThat( - reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.2\"]}") ); assertThat( - reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"192.168.0.4\"]}") ); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java index b4c7ea0ed950..e3bdb3d45818 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldTypeTests.java @@ -110,7 +110,7 @@ public class KeywordFieldTypeTests extends FieldTypeTestCase { public void testTermsQuery() { MappedFieldType ft = new KeywordFieldType("field"); - BytesRef[] terms = new BytesRef[] { new BytesRef("foo"), new BytesRef("bar") }; + List terms = List.of(new BytesRef("foo"), new BytesRef("bar")); assertEquals(new TermInSetQuery("field", terms), ft.termsQuery(Arrays.asList("foo", "bar"), MOCK_CONTEXT)); MappedFieldType ft2 = new KeywordFieldType("field", false, true, Map.of()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java index f2e788918010..57d52991a644 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordScriptFieldTypeTests.java @@ -13,6 +13,7 @@ import org.apache.lucene.document.StoredField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; @@ -115,9 +116,19 @@ public class KeywordScriptFieldTypeTests extends AbstractScriptFieldTypeTestCase BinaryScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = 
searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"a\"]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"b\"]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [\"d\"]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"a\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"b\"]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [\"d\"]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java index 40357399cab5..a8cb4d51c5ef 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/LongScriptFieldTypeTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.Collector; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; @@ -132,9 +133,19 @@ public class LongScriptFieldTypeTests extends AbstractNonTextScriptFieldTypeTest LongScriptFieldData ifd = simpleMappedFieldType().fielddataBuilder(mockFielddataContext()).build(null, null); SortField sf = ifd.sortField(null, MultiValueMode.MIN, null, false); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 3, new Sort(sf)); - assertThat(reader.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [1]}")); - assertThat(reader.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [2]}")); - assertThat(reader.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), equalTo("{\"foo\": [4]}")); + StoredFields storedFields = reader.storedFields(); + assertThat( + storedFields.document(docs.scoreDocs[0].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [1]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[1].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [2]}") + ); + assertThat( + storedFields.document(docs.scoreDocs[2].doc).getBinaryValue("_source").utf8ToString(), + equalTo("{\"foo\": [4]}") + ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java index 5360215b5b05..836b791af23c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/StoredNumericValuesTests.java @@ -80,7 +80,7 @@ public class StoredNumericValuesTests extends MapperServiceTestCase { "field10" ); CustomFieldsVisitor fieldsVisitor = new CustomFieldsVisitor(fieldNames, false); - searcher.doc(0, fieldsVisitor); + searcher.storedFields().document(0, 
fieldsVisitor); fieldsVisitor.postProcess(mapperService::fieldType); assertThat(fieldsVisitor.fields().size(), equalTo(10)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java index c8fcf486068c..86914cfe9ced 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TextFieldMapperTests.java @@ -879,7 +879,7 @@ public class TextFieldMapperTests extends MapperTestCase { IndexSearcher searcher = newSearcher(ir); MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "Prio 1"); TopDocs td = searcher.search(queryBuilder.toQuery(searchExecutionContext), 1); - assertEquals(1, td.totalHits.value); + assertEquals(1, td.totalHits.value()); }); Exception e = expectThrows( diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java index 5a34886d73db..c8d7ad8127b5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedFieldTypeTests.java @@ -81,7 +81,7 @@ public class KeyedFlattenedFieldTypeTests extends FieldTypeTestCase { public void testTermsQuery() { KeyedFlattenedFieldType ft = createFieldType(); - Query expected = new TermInSetQuery(ft.name(), new BytesRef("key\0value1"), new BytesRef("key\0value2")); + Query expected = new TermInSetQuery(ft.name(), List.of(new BytesRef("key\0value1"), new BytesRef("key\0value2"))); List terms = new ArrayList<>(); terms.add("value1"); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java index f494af259c50..b52192d6e47b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/flattened/KeyedFlattenedLeafFieldDataTests.java @@ -23,8 +23,6 @@ import org.junit.Before; import java.io.IOException; -import static org.apache.lucene.index.SortedSetDocValues.NO_MORE_ORDS; - public class KeyedFlattenedLeafFieldDataTests extends ESTestCase { private LeafOrdinalsFieldData delegate; @@ -121,7 +119,8 @@ public class KeyedFlattenedLeafFieldDataTests extends ESTestCase { docValues.advanceExact(0); int retrievedOrds = 0; - for (long ord = docValues.nextOrd(); ord != NO_MORE_ORDS; ord = docValues.nextOrd()) { + for (int i = 0; i < docValues.docValueCount(); i++) { + long ord = docValues.nextOrd(); assertTrue(0 <= ord && ord < 10); retrievedOrds++; @@ -190,9 +189,7 @@ public class KeyedFlattenedLeafFieldDataTests extends ESTestCase { @Override public long nextOrd() { - if (index == documentOrds.length) { - return NO_MORE_ORDS; - } + assertTrue(index < documentOrds.length); return documentOrds[index++]; } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java index b2ffb779be00..de4ab0bc5df3 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/vectors/DenormalizedCosineFloatVectorValuesTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.index.mapper.vectors; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.NumericDocValues; import org.elasticsearch.test.ESTestCase; @@ -25,7 +26,7 @@ public class DenormalizedCosineFloatVectorValuesTests extends ESTestCase { wrap(new float[0][0]), wrapMagnitudes(new float[0]) ); - assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.nextDoc()); + assertEquals(NO_MORE_DOCS, normalizedCosineFloatVectorValues.iterator().nextDoc()); } public void testRandomVectors() throws IOException { @@ -47,9 +48,10 @@ public class DenormalizedCosineFloatVectorValuesTests extends ESTestCase { wrapMagnitudes(magnitudes) ); + KnnVectorValues.DocIndexIterator iterator = normalizedCosineFloatVectorValues.iterator(); for (int i = 0; i < numVectors; i++) { - assertEquals(i, normalizedCosineFloatVectorValues.advance(i)); - assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(), (float) 1e-6); + assertEquals(i, iterator.advance(i)); + assertArrayEquals(vectors[i], normalizedCosineFloatVectorValues.vectorValue(iterator.index()), (float) 1e-6); assertEquals(magnitudes[i], normalizedCosineFloatVectorValues.magnitude(), (float) 1e-6); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java index c007156c806e..baade683a90f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/vectors/KnnDenseVectorScriptDocValuesTests.java @@ -208,7 +208,41 @@ public class KnnDenseVectorScriptDocValuesTests extends ESTestCase { } @Override - public byte[] vectorValue() { + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public byte[] vectorValue(int ord) { + assert ord == index; for (int i = 0; i < byteVector.length; i++) { byteVector[i] = (byte) vectors[index][i]; } @@ -216,25 +250,12 @@ public class KnnDenseVectorScriptDocValuesTests extends ESTestCase { } @Override - public int docID() { - return index; - } - - @Override - public int nextDoc() { + public ByteVectorValues copy() { throw new UnsupportedOperationException(); } @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; - } - - @Override - public VectorScorer scorer(byte[] floats) throws IOException { + public VectorScorer scorer(byte[] floats) { throw new UnsupportedOperationException(); } }; @@ -256,30 +277,51 @@ public class KnnDenseVectorScriptDocValuesTests extends ESTestCase { } @Override - public float[] vectorValue() { + public DocIndexIterator iterator() { + return new DocIndexIterator() { + @Override + public int index() { + return index; + } + + @Override + public int docID() { + return index; + } + + @Override + public int nextDoc() throws IOException { + return advance(index + 1); + } + + 
@Override + public int advance(int target) throws IOException { + if (target >= size()) { + return NO_MORE_DOCS; + } + return index = target; + } + + @Override + public long cost() { + return 0; + } + }; + } + + @Override + public float[] vectorValue(int ord) { + assert ord == index; return vectors[index]; } @Override - public int docID() { - return index; + public FloatVectorValues copy() { + throw new UnsupportedOperationException(); } @Override - public int nextDoc() { - return advance(index + 1); - } - - @Override - public int advance(int target) { - if (target >= size()) { - return NO_MORE_DOCS; - } - return index = target; - } - - @Override - public VectorScorer scorer(float[] floats) throws IOException { + public VectorScorer scorer(float[] floats) { throw new UnsupportedOperationException(); } }; diff --git a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 0fa8f70525e8..e9ef3ac8ad74 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -207,15 +207,15 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase childFields = context.getMatchingFieldNames(field + ".*"); assertThat(booleanQuery.clauses().size(), equalTo(childFields.size())); for (BooleanClause booleanClause : booleanQuery) { - assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD)); + assertThat(booleanClause.occur(), equalTo(BooleanClause.Occur.SHOULD)); } } else if (context.getFieldType(field).hasDocValues() || context.getFieldType(field).getTextSearchInfo().hasNorms()) { assertThat(constantScoreQuery.getQuery(), instanceOf(FieldExistsQuery.class)); @@ -87,7 +87,7 @@ public class ExistsQueryBuilderTests extends AbstractQueryTestCase { @@ -101,7 +100,7 @@ public class MatchBoolPrefixQueryBuilderTests extends AbstractQueryTestCase allQueriesExceptLast = IntStream.range(0, booleanQuery.clauses().size() - 1) .mapToObj(booleanQuery.clauses()::get) - .map(BooleanClause::getQuery) + .map(BooleanClause::query) .collect(Collectors.toSet()); assertThat( allQueriesExceptLast, @@ -122,13 +121,13 @@ public class MatchBoolPrefixQueryBuilderTests extends AbstractQueryTestCase clause.getOccur() == BooleanClause.Occur.SHOULD) + .filter(clause -> clause.occur() == BooleanClause.Occur.SHOULD) .count(); final int expected = Queries.calculateMinShouldMatch(optionalClauses, queryBuilder.minimumShouldMatch()); assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(expected)); @@ -266,10 +265,12 @@ public class MatchBoolPrefixQueryBuilderTests extends AbstractQueryTestCase query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); query.setAnalyzer(new MockGraphAnalyzer(createGiantGraphMultiTerms())); - expectThrows(BooleanQuery.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); + expectThrows(IndexSearcher.TooManyClauses.class, () -> query.parse(Type.PHRASE, TEXT_FIELD_NAME, "")); } private static class MockGraphAnalyzer extends Analyzer { @@ -567,7 +568,7 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), 
containsString("would require more than 10000 effort.")); } @@ -775,7 +775,7 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase queryBuilder.toQuery(createSearchExecutionContext()) ); - assertThat(e.getMessage(), containsString("Determinizing [ac]*")); + assertThat(e.getMessage(), containsString("Determinizing automaton")); assertThat(e.getMessage(), containsString("would require more than 10 effort.")); } @@ -925,10 +925,10 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase query.rewrite(searcher)); assertThat(exc.getMessage(), containsString("maxClauseCount")); } finally { - BooleanQuery.setMaxClauseCount(origBoolMaxClauseCount); + IndexSearcher.setMaxClauseCount(origBoolMaxClauseCount); } } } diff --git a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java index 3edf15068838..589019093075 100644 --- a/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/TermsSetQueryBuilderTests.java @@ -238,7 +238,7 @@ public class TermsSetQueryBuilderTests extends AbstractQueryTestCase wrapper = reader -> new FieldMaskingReader( "field", @@ -82,7 +82,7 @@ public class IndexReaderWrapperTests extends ESTestCase { } outerCount.incrementAndGet(); }); - assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(0, wrap.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); wrap.close(); assertFalse("wrapped reader is closed", wrap.getIndexReader().tryIncRef()); assertEquals(sourceRefCount, open.getRefCount()); @@ -106,7 +106,7 @@ public class IndexReaderWrapperTests extends ESTestCase { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); final AtomicInteger closeCalls = new AtomicInteger(0); CheckedFunction wrapper = reader -> new FieldMaskingReader( @@ -153,7 +153,7 @@ public class IndexReaderWrapperTests extends ESTestCase { writer.addDocument(doc); DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); IndexSearcher searcher = newSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value); + assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits.value()); searcher.setSimilarity(iwc.getSimilarity()); CheckedFunction wrapper = directoryReader -> directoryReader; try ( diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index f15506676dc3..d480f7bfc8d7 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -2793,9 +2793,9 @@ public class IndexShardTests extends IndexShardTestCase { } try (Engine.Searcher searcher = shard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 1); + 
assertEquals(search.totalHits.value(), 1); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } CheckedFunction wrapper = reader -> new FieldMaskingReader("foo", reader); closeShards(shard); @@ -2815,9 +2815,9 @@ public class IndexShardTests extends IndexShardTestCase { try (Engine.Searcher searcher = newShard.acquireSearcher("test")) { TopDocs search = searcher.search(new TermQuery(new Term("foo", "bar")), 10); - assertEquals(search.totalHits.value, 0); + assertEquals(search.totalHits.value(), 0); search = searcher.search(new TermQuery(new Term("foobar", "bar")), 10); - assertEquals(search.totalHits.value, 1); + assertEquals(search.totalHits.value(), 1); } try (Engine.GetResult getResult = newShard.get(new Engine.Get(false, false, "1"))) { assertTrue(getResult.exists()); diff --git a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index 6fa405c091da..ccf0bbebcc35 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -437,9 +437,8 @@ public class RefreshListenersTests extends ESTestCase { ) { assertTrue("document not found", getResult.exists()); assertEquals(iteration, getResult.version()); - org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.document( - getResult.docIdAndVersion().docId - ); + org.apache.lucene.document.Document document = getResult.docIdAndVersion().reader.storedFields() + .document(getResult.docIdAndVersion().docId); assertThat(document.getValues("test"), arrayContaining(testFieldValue)); } } catch (Exception t) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java index 70e514355223..aa89f31757ef 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardSplittingQueryTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.ScoreMode; @@ -172,6 +173,7 @@ public class ShardSplittingQueryTests extends ESTestCase { int doc; int numActual = 0; int lastDoc = 0; + StoredFields storedFields = reader.storedFields(); while ((doc = iterator.nextDoc()) != DocIdSetIterator.NO_MORE_DOCS) { lastDoc = doc; while (shard_id.nextDoc() < doc) { @@ -181,7 +183,7 @@ public class ShardSplittingQueryTests extends ESTestCase { } assertEquals(shard_id.docID(), doc); long shardID = shard_id.nextValue(); - BytesRef id = reader.document(doc).getBinaryValue("_id"); + BytesRef id = storedFields.document(doc).getBinaryValue("_id"); String actualId = Uid.decodeId(id.bytes, id.offset, id.length); assertNotEquals(ctx.reader() + " docID: " + doc + " actualID: " + actualId, shardID, targetShardId); } diff --git a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java 
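Note on the pattern in the surrounding hunks: Lucene 10 exposes TotalHits value/relation through accessor methods rather than public fields, and stored documents are read through IndexReader#storedFields() rather than the old reader.document(docId) calls. The following is an illustrative sketch only, not part of this changeset; the class and method names are invented for the example:

    import java.io.IOException;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.StoredFields;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.search.TotalHits;

    class TotalHitsAndStoredFieldsExample {
        static Document firstHit(DirectoryReader reader) throws IOException {
            IndexSearcher searcher = new IndexSearcher(reader);
            TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
            long hitCount = topDocs.totalHits.value();                  // was the field totalHits.value
            TotalHits.Relation relation = topDocs.totalHits.relation(); // was the field totalHits.relation
            assert hitCount > 0 && relation == TotalHits.Relation.EQUAL_TO;
            StoredFields storedFields = reader.storedFields();          // was reader.document(docId)
            return storedFields.document(topDocs.scoreDocs[0].doc);
        }
    }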
index b05f287e628a..fa5f713dfd67 100644 --- a/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java +++ b/server/src/test/java/org/elasticsearch/index/similarity/ScriptedSimilarityTests.java @@ -144,7 +144,7 @@ public class ScriptedSimilarityTests extends ESTestCase { 3.2f ); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); r.close(); @@ -236,7 +236,7 @@ public class ScriptedSimilarityTests extends ESTestCase { searcher.setSimilarity(sim); Query query = new BoostQuery(new TermQuery(new Term("f", "foo")), 3.2f); TopDocs topDocs = searcher.search(query, 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertTrue(initCalled.get()); assertTrue(called.get()); assertEquals(42, topDocs.scoreDocs[0].score, 0); diff --git a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java index 394ce35c6b49..38e6ca0be064 100644 --- a/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/FsDirectoryFactoryTests.java @@ -33,10 +33,16 @@ import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; +import java.util.HashMap; import java.util.Locale; +import java.util.Map; +import java.util.Set; +import java.util.function.BiPredicate; public class FsDirectoryFactoryTests extends ESTestCase { + final PreLoadExposingFsDirectoryFactory fsDirectoryFactory = new PreLoadExposingFsDirectoryFactory(); + public void testPreload() throws IOException { doTestPreload(); doTestPreload("nvd", "dvd", "tim"); @@ -60,10 +66,11 @@ public class FsDirectoryFactoryTests extends ESTestCase { assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.tmp", newIOContext(random()))); assertTrue(FsDirectoryFactory.HybridDirectory.useDelegate("foo.fdt__0.tmp", newIOContext(random()))); MMapDirectory delegate = hybridDirectory.getDelegate(); - assertThat(delegate, Matchers.instanceOf(FsDirectoryFactory.PreLoadMMapDirectory.class)); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) delegate; - assertTrue(preLoadMMapDirectory.useDelegate("foo.dvd")); - assertTrue(preLoadMMapDirectory.useDelegate("foo.tmp")); + assertThat(delegate, Matchers.instanceOf(MMapDirectory.class)); + var func = fsDirectoryFactory.preLoadFuncMap.get(delegate); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); + fsDirectoryFactory.preLoadFuncMap.clear(); } } @@ -72,7 +79,21 @@ public class FsDirectoryFactoryTests extends ESTestCase { Path tempDir = createTempDir().resolve(idxSettings.getUUID()).resolve("0"); Files.createDirectories(tempDir); ShardPath path = new ShardPath(false, tempDir, tempDir, new ShardId(idxSettings.getIndex(), 0)); - return new FsDirectoryFactory().newDirectory(idxSettings, path); + return fsDirectoryFactory.newDirectory(idxSettings, path); + } + + static class PreLoadExposingFsDirectoryFactory extends FsDirectoryFactory { + + // expose for testing + final Map> preLoadFuncMap = new HashMap<>(); + + @Override + public MMapDirectory setPreload(MMapDirectory mMapDirectory, Set preLoadExtensions) { + var preLoadFunc = 
FsDirectoryFactory.getPreloadFunc(preLoadExtensions); + mMapDirectory.setPreload(preLoadFunc); + preLoadFuncMap.put(mMapDirectory, preLoadFunc); + return mMapDirectory; + } } private void doTestPreload(String... preload) throws IOException { @@ -85,26 +106,23 @@ public class FsDirectoryFactoryTests extends ESTestCase { assertSame(dir, directory); // prevent warnings assertFalse(directory instanceof SleepingLockWrapper); var mmapDirectory = FilterDirectory.unwrap(directory); + assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); if (preload.length == 0) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertFalse(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); } else if (Arrays.asList(preload).contains("*")) { - assertTrue(directory.toString(), mmapDirectory instanceof MMapDirectory); - assertTrue(((MMapDirectory) mmapDirectory).getPreload()); + assertEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); } else { - assertTrue(directory.toString(), mmapDirectory instanceof FsDirectoryFactory.PreLoadMMapDirectory); - FsDirectoryFactory.PreLoadMMapDirectory preLoadMMapDirectory = (FsDirectoryFactory.PreLoadMMapDirectory) mmapDirectory; + var func = fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.ALL_FILES); + assertNotEquals(fsDirectoryFactory.preLoadFuncMap.get(mmapDirectory), MMapDirectory.NO_FILES); + assertTrue(func.test("foo.dvd", newIOContext(random()))); + assertTrue(func.test("foo.tmp", newIOContext(random()))); for (String ext : preload) { - assertTrue("ext: " + ext, preLoadMMapDirectory.useDelegate("foo." + ext)); - assertTrue("ext: " + ext, preLoadMMapDirectory.getDelegate().getPreload()); + assertTrue("ext: " + ext, func.test("foo." 
+ ext, newIOContext(random()))); } - assertFalse(preLoadMMapDirectory.useDelegate("XXX")); - assertFalse(preLoadMMapDirectory.getPreload()); - preLoadMMapDirectory.close(); - expectThrows( - AlreadyClosedException.class, - () -> preLoadMMapDirectory.getDelegate().openInput("foo.tmp", IOContext.DEFAULT) - ); + assertFalse(func.test("XXX", newIOContext(random()))); + mmapDirectory.close(); + expectThrows(AlreadyClosedException.class, () -> mmapDirectory.openInput("foo.tmp", IOContext.DEFAULT)); } } expectThrows( @@ -148,7 +166,7 @@ public class FsDirectoryFactoryTests extends ESTestCase { ); break; case FS: - if (Constants.JRE_IS_64BIT && MMapDirectory.UNMAP_SUPPORTED) { + if (Constants.JRE_IS_64BIT) { assertTrue(FsDirectoryFactory.isHybridFs(directory)); } else { assertTrue(directory.toString(), directory instanceof NIOFSDirectory); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index ebffd54a742c..4f7367247194 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -70,8 +70,9 @@ public class IndicesQueryCacheTests extends ESTestCase { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override @@ -348,16 +349,22 @@ public class IndicesQueryCacheTests extends ESTestCase { return weight.explain(context, doc); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - scorerCalled = true; - return weight.scorer(context); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { scorerSupplierCalled = true; - return weight.scorerSupplier(context); + ScorerSupplier inScorerSupplier = weight.scorerSupplier(context); + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + scorerCalled = true; + return inScorerSupplier.get(leadCost); + } + + @Override + public long cost() { + return inScorerSupplier.cost(); + } + }; } @Override diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java index dfd71bba0208..773c660caa1c 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java @@ -421,8 +421,8 @@ public class IndicesRequestCacheTests extends ESTestCase { try (BytesStreamOutput out = new BytesStreamOutput()) { IndexSearcher searcher = newSearcher(reader); TopDocs topDocs = searcher.search(new TermQuery(new Term("id", Integer.toString(id))), 1); - assertEquals(1, topDocs.totalHits.value); - Document document = reader.document(topDocs.scoreDocs[0].doc); + assertEquals(1, topDocs.totalHits.value()); + Document document = reader.storedFields().document(topDocs.scoreDocs[0].doc); 
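Note on the FsDirectoryFactoryTests hunks above: the PreLoadMMapDirectory wrapper is gone, and preloading is now configured directly on MMapDirectory with a BiPredicate (MMapDirectory.ALL_FILES, MMapDirectory.NO_FILES, or a custom predicate), which is what the test exercises via the exposed preload function. A rough illustrative sketch of that pattern; the class name and the extension-matching logic below are invented for the example and are not how the production getPreloadFunc is implemented:

    import java.io.IOException;
    import java.nio.file.Path;
    import java.util.Set;
    import java.util.function.BiPredicate;
    import org.apache.lucene.store.IOContext;
    import org.apache.lucene.store.MMapDirectory;

    class PreloadExample {
        static MMapDirectory open(Path path, Set<String> preloadExtensions) throws IOException {
            MMapDirectory dir = new MMapDirectory(path);
            // preload only files whose extension is in the configured set
            BiPredicate<String, IOContext> preload = (name, context) -> {
                int dot = name.lastIndexOf('.');
                return dot != -1 && preloadExtensions.contains(name.substring(dot + 1));
            };
            dir.setPreload(preloadExtensions.contains("*") ? MMapDirectory.ALL_FILES : preload);
            return dir;
        }
    }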
out.writeString(document.get("value")); loadedFromCache = false; return out.bytes(); diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java index baa35101c1c8..1c1a9a645b99 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorSearchAfterTests.java @@ -114,11 +114,11 @@ public class SinglePassGroupingCollectorSearchAfterTests extends ESTestCase { TopFieldDocs topDocs = searcher.search(query, topFieldCollectorManager); TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(sortField.getField(), collapseTopFieldDocs.field); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); + assertEquals(totalHits, topDocs.totalHits.value()); Object currentValue = null; int topDocsIndex = 0; diff --git a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java index 90adb2d0ffcc..30c68fe708c8 100644 --- a/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/grouping/SinglePassGroupingCollectorTests.java @@ -49,11 +49,11 @@ import java.util.Set; public class SinglePassGroupingCollectorTests extends ESTestCase { private static class SegmentSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; SegmentSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -62,7 +62,7 @@ public class SinglePassGroupingCollectorTests extends ESTestCase { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } @@ -140,10 +140,10 @@ public class SinglePassGroupingCollectorTests extends ESTestCase { TopFieldGroups collapseTopFieldDocs = collapsingCollector.getTopGroups(0); assertEquals(collapseField.getField(), collapseTopFieldDocs.field); assertEquals(expectedNumGroups, collapseTopFieldDocs.scoreDocs.length); - assertEquals(totalHits, collapseTopFieldDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation); + assertEquals(totalHits, collapseTopFieldDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, collapseTopFieldDocs.totalHits.relation()); assertEquals(totalHits, topDocs.scoreDocs.length); - assertEquals(totalHits, topDocs.totalHits.value); + assertEquals(totalHits, topDocs.totalHits.value()); Set seen = new HashSet<>(); // collapse field is the last sort diff --git 
a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java index 71a270355531..076cd0af1bf2 100644 --- a/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/queries/BlendedTermQueryTests.java @@ -88,7 +88,7 @@ public class BlendedTermQueryTests extends ESTestCase { query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { BooleanQuery.Builder query = new BooleanQuery.Builder(); @@ -110,7 +110,7 @@ public class BlendedTermQueryTests extends ESTestCase { query.add(gen, BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(1), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } { @@ -120,8 +120,8 @@ public class BlendedTermQueryTests extends ESTestCase { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("unknown_field")) { assertThat(termStates.docFreq(), equalTo(0)); @@ -131,7 +131,7 @@ public class BlendedTermQueryTests extends ESTestCase { assertThat(termStates.totalTermFreq(), greaterThan(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo((long) iters + username.length)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo((long) iters + username.length)); } { // test with an unknown field and an unknown term @@ -140,13 +140,13 @@ public class BlendedTermQueryTests extends ESTestCase { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); assertThat(termStates.docFreq(), equalTo(0)); assertThat(termStates.totalTermFreq(), equalTo(0L)); } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(0L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(0L)); } { // test with an unknown field and a term that is present in only one field @@ -155,8 +155,8 @@ public class BlendedTermQueryTests extends ESTestCase { Query rewrite = searcher.rewrite(query); assertThat(rewrite, instanceOf(BooleanQuery.class)); for (BooleanClause clause : (BooleanQuery) rewrite) { - assertThat(clause.getQuery(), instanceOf(TermQuery.class)); - TermQuery termQuery = (TermQuery) clause.getQuery(); + 
assertThat(clause.query(), instanceOf(TermQuery.class)); + TermQuery termQuery = (TermQuery) clause.query(); TermStates termStates = termQuery.getTermStates(); if (termQuery.getTerm().field().equals("username")) { assertThat(termStates.docFreq(), equalTo(1)); @@ -166,7 +166,7 @@ public class BlendedTermQueryTests extends ESTestCase { assertThat(termStates.totalTermFreq(), equalTo(0L)); } } - assertThat(searcher.search(query, 10).totalHits.value, equalTo(1L)); + assertThat(searcher.search(query, 10).totalHits.value(), equalTo(1L)); } reader.close(); w.close(); @@ -250,7 +250,7 @@ public class BlendedTermQueryTests extends ESTestCase { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); } reader.close(); w.close(); @@ -298,7 +298,7 @@ public class BlendedTermQueryTests extends ESTestCase { String[] fieldNames = fields.keySet().toArray(new String[0]); Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fieldNames, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); - assertTrue(search.totalHits.value > 0); + assertTrue(search.totalHits.value() > 0); assertTrue(search.scoreDocs.length > 0); } reader.close(); @@ -332,7 +332,7 @@ public class BlendedTermQueryTests extends ESTestCase { Query query = BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f); TopDocs search = searcher.search(query, 10); ScoreDoc[] scoreDocs = search.scoreDocs; - assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); + assertEquals(Integer.toString(0), reader.storedFields().document(scoreDocs[0].doc).getField("id").stringValue()); reader.close(); w.close(); diff --git a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java index f5266568e6fd..126641037fde 100644 --- a/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java +++ b/server/src/test/java/org/elasticsearch/lucene/search/uhighlight/CustomUnifiedHighlighterTests.java @@ -141,7 +141,7 @@ public class CustomUnifiedHighlighterTests extends ESTestCase { IndexSearcher searcher = newSearcher(reader); iw.close(); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER); - assertThat(topDocs.totalHits.value, equalTo(1L)); + assertThat(topDocs.totalHits.value(), equalTo(1L)); String rawValue = Strings.arrayToDelimitedString(inputs, String.valueOf(MULTIVAL_SEP_CHAR)); UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, analyzer); builder.withBreakIterator(() -> breakIterator); diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java index 239c90bdee2f..78cd90e8f526 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptTermStatsTests.java @@ -16,6 +16,7 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.LeafReaderContext; +import 
org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; @@ -336,10 +337,11 @@ public class ScriptTermStatsTests extends ESTestCase { withIndexSearcher(searcher -> { for (LeafReaderContext leafReaderContext : searcher.getLeafContexts()) { IndexReader reader = leafReaderContext.reader(); + StoredFields storedFields = reader.storedFields(); DocIdSetIterator docIdSetIterator = DocIdSetIterator.all(reader.maxDoc()); ScriptTermStats termStats = new ScriptTermStats(searcher, leafReaderContext, docIdSetIterator::docID, terms); while (docIdSetIterator.nextDoc() <= reader.maxDoc()) { - String docId = reader.document(docIdSetIterator.docID()).get("id"); + String docId = storedFields.document(docIdSetIterator.docID()).get("id"); if (expectedValues.containsKey(docId)) { assertThat(function.apply(termStats), expectedValues.get(docId)); } diff --git a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 6a6f5dc44ef6..663b39d11691 100644 --- a/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/server/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -714,11 +714,8 @@ public class MultiValueModeTests extends ESTestCase { @Override public long nextOrd() { - if (i < array[doc].length) { - return array[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < array[doc].length; + return array[doc][i++]; } @Override @@ -762,7 +759,8 @@ public class MultiValueModeTests extends ESTestCase { } int expected = -1; if (values.advanceExact(i)) { - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int j = 0; j < values.docValueCount(); j++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { @@ -810,7 +808,8 @@ public class MultiValueModeTests extends ESTestCase { if (++count > maxChildren) { break; } - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + for (int i = 0; i < values.docValueCount(); i++) { + long ord = values.nextOrd(); if (expected == -1) { expected = (int) ord; } else { diff --git a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java index 5e1296c35401..aa2e76f512cc 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchCancellationTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.document.KnnFloatVectorField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.PointValues; @@ -105,14 +106,17 @@ public class SearchCancellationTests extends ESTestCase { true ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(reader.numDocs())); searcher.addQueryCancellation(cancellation); - expectThrows(TaskCancelledException.class, () -> searcher.search(new MatchAllDocsQuery(), 
new TotalHitCountCollectorManager())); + expectThrows( + TaskCancelledException.class, + () -> searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())) + ); searcher.removeQueryCancellation(cancellation); - Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits2 = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits2, equalTo(reader.numDocs())); } @@ -203,15 +207,17 @@ public class SearchCancellationTests extends ESTestCase { cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues vectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); cancelled.set(true); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); // On the first doc when already canceled, it throws - expectThrows(TaskCancelledException.class, vectorValues::nextDoc); + expectThrows(TaskCancelledException.class, iterator::nextDoc); cancelled.set(false); // Avoid exception during construction of the wrapper objects FloatVectorValues uncancelledVectorValues = searcher.getIndexReader().leaves().get(0).reader().getFloatVectorValues(KNN_FIELD_NAME); + uncancelledVectorValues.iterator(); cancelled.set(true); searcher.removeQueryCancellation(cancellation); // On the first doc when already canceled, it throws, but with the cancellation removed, it should not - uncancelledVectorValues.nextDoc(); + iterator.nextDoc(); } private static class PointValuesIntersectVisitor implements PointValues.IntersectVisitor { diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java index 642804730a14..5dc07a41b3f8 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java @@ -772,7 +772,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { ), (response) -> { SearchHits hits = response.getHits(); - assertEquals(hits.getTotalHits().value, numDocs); + assertEquals(hits.getTotalHits().value(), numDocs); assertEquals(hits.getHits().length, 2); int index = 0; for (SearchHit hit : hits.getHits()) { @@ -2505,7 +2505,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { ); PlainActionFuture future = new PlainActionFuture<>(); service.executeQueryPhase(request, task, future.delegateFailure((l, r) -> { - assertEquals(1, r.queryResult().getTotalHits().value); + assertEquals(1, r.queryResult().getTotalHits().value()); l.onResponse(null); })); future.get(); @@ -2714,7 +2714,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { SearchShardTask task = new SearchShardTask(0, "type", "action", "description", null, emptyMap()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } try { @@ -2725,7 +2725,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { .get(); assertTrue(response.isAcknowledged()); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNull(searchContext.searcher().getExecutor()); + assertFalse(searchContext.searcher().hasExecutor()); } } finally { // reset original 
default setting @@ -2735,7 +2735,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { .setPersistentSettings(Settings.builder().putNull(SEARCH_WORKER_THREADS_ENABLED.getKey()).build()) .get(); try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, randomBoolean())) { - assertNotNull(searchContext.searcher().getExecutor()); + assertTrue(searchContext.searcher().hasExecutor()); } } } @@ -2778,7 +2778,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.DFS, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2795,7 +2795,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "DFS supports parallel collection, so the number of slices should be > 1.", @@ -2808,7 +2808,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2825,7 +2825,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", @@ -2838,9 +2838,9 @@ public class SearchServiceTests extends ESSingleNodeTestCase { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.FETCH, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as FETCH does not support parallel collection and thus runs on the calling" @@ -2854,9 +2854,9 @@ public class SearchServiceTests extends ESSingleNodeTestCase { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.NONE, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new 
TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 as NONE does not support parallel collection.", @@ -2877,9 +2877,9 @@ public class SearchServiceTests extends ESSingleNodeTestCase { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNull(searcher.getExecutor()); + assertFalse(searcher.hasExecutor()); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "The number of slices should be 1 when QUERY parallel collection is disabled.", @@ -2899,7 +2899,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { { try (SearchContext searchContext = service.createContext(readerContext, request, task, ResultsType.QUERY, true)) { ContextIndexSearcher searcher = searchContext.searcher(); - assertNotNull(searcher.getExecutor()); + assertTrue(searcher.hasExecutor()); final int maxPoolSize = executor.getMaximumPoolSize(); assertEquals( @@ -2916,7 +2916,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase { assertNotEquals("Sanity check to ensure this isn't the default of 1 when pool size is unset", 1, expectedSlices); final long priorExecutorTaskCount = executor.getCompletedTaskCount(); - searcher.search(termQuery, new TotalHitCountCollectorManager()); + searcher.search(termQuery, new TotalHitCountCollectorManager(searcher.getSlices())); assertBusy( () -> assertEquals( "QUERY supports parallel collection when enabled, so the number of slices should be > 1.", diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java index 6b4618bf3257..ac5d886c9ba1 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/MultiBucketCollectorTests.java @@ -33,14 +33,8 @@ import java.util.concurrent.atomic.AtomicBoolean; import static org.hamcrest.Matchers.equalTo; public class MultiBucketCollectorTests extends ESTestCase { - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -247,7 +241,7 @@ public class MultiBucketCollectorTests extends ESTestCase { collector1 = new TerminateAfterBucketCollector(collector1, 1); collector2 = new TerminateAfterBucketCollector(collector2, 2); - Scorable scorer = new ScoreAndDoc(); + Scorable scorer = new Score(); List collectors = Arrays.asList(collector1, collector2); Collections.shuffle(collectors, random()); @@ -275,4 +269,78 @@ public class MultiBucketCollectorTests extends ESTestCase { assertFalse(setScorerCalled1.get()); assertFalse(setScorerCalled2.get()); } + + public void testCacheScores() throws IOException { + ScoringBucketCollector scoringBucketCollector1 = new ScoringBucketCollector(); + ScoringBucketCollector scoringBucketCollector2 = new ScoringBucketCollector(); + + DummyScorable scorable = new DummyScorable(); + + // First test the tester + LeafBucketCollector leafBucketCollector1 = scoringBucketCollector1.getLeafCollector(null); + 
LeafBucketCollector leafBucketCollector2 = scoringBucketCollector2.getLeafCollector(null); + leafBucketCollector1.setScorer(scorable); + leafBucketCollector2.setScorer(scorable); + leafBucketCollector1.collect(0, 0); + leafBucketCollector2.collect(0, 0); + assertEquals(2, scorable.numScoreCalls); + + // reset + scorable.numScoreCalls = 0; + LeafBucketCollector leafBucketCollector = MultiBucketCollector.wrap( + randomBoolean(), + Arrays.asList(scoringBucketCollector1, scoringBucketCollector2) + ).getLeafCollector(null); + leafBucketCollector.setScorer(scorable); + leafBucketCollector.collect(0, 0); + // Even though both leaf collectors called scorable.score(), it only got called once thanks to caching + assertEquals(1, scorable.numScoreCalls); + } + + private static class ScoringBucketCollector extends BucketCollector { + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; // needs scores + } + + @Override + public LeafBucketCollector getLeafCollector(AggregationExecutionContext aggCtx) throws IOException { + return new ScoringLeafBucketCollector(); + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws IOException { + + } + } + + private static class ScoringLeafBucketCollector extends LeafBucketCollector { + + private Scorable scorable; + + @Override + public void setScorer(Scorable scorer) throws IOException { + this.scorable = scorer; + } + + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + scorable.score(); + } + } + + private static class DummyScorable extends Scorable { + int numScoreCalls = 0; + + @Override + public float score() throws IOException { + numScoreCalls++; + return 42f; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java index 879c7e6aeff7..eb5fa734a8c9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTestCase.java @@ -80,12 +80,12 @@ public abstract class ShardSizeTestCase extends ESIntegTestCase { indexRandom(true, docs); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing1).setQuery(matchAllQuery()), resp -> { - long totalOnOne = resp.getHits().getTotalHits().value; + long totalOnOne = resp.getHits().getTotalHits().value(); assertThat(totalOnOne, is(15L)); }); assertNoFailuresAndResponse(prepareSearch("idx").setRouting(routing2).setQuery(matchAllQuery()), resp -> { assertNoFailures(resp); - long totalOnTwo = resp.getHits().getTotalHits().value; + long totalOnTwo = resp.getHits().getTotalHits().value(); assertThat(totalOnTwo, is(12L)); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index bbd12726ac4e..28a032e7281e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -2571,19 +2571,19 @@ public class CompositeAggregatorTests extends AggregatorTestCase { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); 
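Note on the TotalHitCountCollectorManager changes in the nearby SearchCancellationTests and SearchServiceTests hunks: the manager is now constructed with the searcher's leaf slices instead of the no-arg constructor. A minimal illustrative sketch, not part of this changeset; the class name is invented for the example:

    import java.io.IOException;
    import org.apache.lucene.index.IndexReader;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.TotalHitCountCollectorManager;

    class HitCountExample {
        static int countAll(IndexReader reader) throws IOException {
            IndexSearcher searcher = new IndexSearcher(reader);
            // was: new TotalHitCountCollectorManager()
            var manager = new TotalHitCountCollectorManager(searcher.getSlices());
            return searcher.search(new MatchAllDocsQuery(), manager);
        }
    }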
assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=c}", result.getBuckets().get(1).getKeyAsString()); assertEquals(2L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(2).getKeyAsString()); assertEquals(1L, result.getBuckets().get(2).getDocCount()); topHits = result.getBuckets().get(2).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new FieldExistsQuery("keyword")), dataset, () -> { @@ -2598,13 +2598,13 @@ public class CompositeAggregatorTests extends AggregatorTestCase { TopHits topHits = result.getBuckets().get(0).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 2); - assertEquals(topHits.getHits().getTotalHits().value, 2L); + assertEquals(topHits.getHits().getTotalHits().value(), 2L); assertEquals("{keyword=d}", result.getBuckets().get(1).getKeyAsString()); assertEquals(1L, result.getBuckets().get(1).getDocCount()); topHits = result.getBuckets().get(1).getAggregations().get("top_hits"); assertNotNull(topHits); assertEquals(topHits.getHits().getHits().length, 1); - assertEquals(topHits.getHits().getTotalHits().value, 1L); + assertEquals(topHits.getHits().getTotalHits().value(), 1L); }); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java index 71b93888ba24..8a72f8af7035 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeValuesCollectorQueueTests.java @@ -15,14 +15,29 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.TextField; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafMetaData; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PointValues; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import 
org.apache.lucene.index.StoredFields; +import org.apache.lucene.index.TermVectors; +import org.apache.lucene.index.Terms; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.KnnCollector; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; @@ -54,8 +69,6 @@ import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.DOUBLE import static org.elasticsearch.index.mapper.NumberFieldMapper.NumberType.LONG; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { static class ClassAndName { @@ -71,11 +84,8 @@ public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { private IndexReader indexReader; @Before - public void setUpMocks() { - indexReader = mock(IndexReader.class); - IndexReaderContext indexReaderContext = mock(IndexReaderContext.class); - when(indexReaderContext.leaves()).thenReturn(List.of()); - when(indexReader.getContext()).thenReturn(indexReaderContext); + public void set() { + indexReader = new DummyReader(); } public void testRandomLong() throws IOException { @@ -425,4 +435,126 @@ public class CompositeValuesCollectorQueueTests extends AggregatorTestCase { } } } + + static class DummyReader extends LeafReader { + @Override + public CacheHelper getCoreCacheHelper() { + return null; + } + + @Override + public Terms terms(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public BinaryDocValues getBinaryDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedDocValues getSortedDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedNumericDocValues getSortedNumericDocValues(String field) throws IOException { + return null; + } + + @Override + public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { + return null; + } + + @Override + public NumericDocValues getNormValues(String field) throws IOException { + return null; + } + + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + return null; + } + + @Override + public FloatVectorValues getFloatVectorValues(String field) throws IOException { + return null; + } + + @Override + public ByteVectorValues getByteVectorValues(String field) throws IOException { + return null; + } + + @Override + public void searchNearestVectors(String field, float[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public void searchNearestVectors(String field, byte[] target, KnnCollector knnCollector, Bits acceptDocs) throws IOException { + + } + + @Override + public FieldInfos getFieldInfos() { + return null; + } + + @Override + public Bits getLiveDocs() { + return null; + } + + @Override + public PointValues getPointValues(String field) throws IOException { + return null; + } + + @Override + public void checkIntegrity() throws IOException { + + } + + @Override + public LeafMetaData getMetaData() { + return null; + } + + @Override + public TermVectors termVectors() throws IOException { + return null; + } + + @Override + public int numDocs() { + return 0; + 
} + + @Override + public int maxDoc() { + return 0; + } + + @Override + public StoredFields storedFields() throws IOException { + return null; + } + + @Override + protected void doClose() throws IOException { + + } + + @Override + public CacheHelper getReaderCacheHelper() { + return null; + } + } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java index 4b9a72bacc97..3d1ed0704acf 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/SingleDimensionValuesSourceTests.java @@ -11,6 +11,7 @@ package org.elasticsearch.search.aggregations.bucket.composite; import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.Term; import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.ConstantScoreQuery; @@ -258,7 +259,7 @@ public class SingleDimensionValuesSourceTests extends ESTestCase { } private static IndexReader mockIndexReader(int maxDoc, int numDocs) { - IndexReader reader = mock(IndexReader.class); + IndexReader reader = mock(LeafReader.class); when(reader.hasDeletions()).thenReturn(maxDoc - numDocs > 0); when(reader.maxDoc()).thenReturn(maxDoc); when(reader.numDocs()).thenReturn(numDocs); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 26e643510859..06f1db352e8f 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -42,9 +42,7 @@ public class BinaryRangeAggregatorTests extends ESTestCase { @Override public long nextOrd() { - if (i == ords.length) { - return NO_MORE_ORDS; - } + assert i < ords.length; return ords[i++]; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java index 4a08295bd7bc..48aabb61371e 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/RareTermsAggregatorTests.java @@ -414,7 +414,7 @@ public class RareTermsAggregatorTests extends AggregatorTestCase { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); TotalHits hits = topHits.getHits().getTotalHits(); assertNotNull(hits); - assertThat(hits.value, equalTo(counter)); + assertThat(hits.value(), equalTo(counter)); assertThat(topHits.getHits().getMaxScore(), equalTo(Float.NaN)); counter += 1; } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 2c76ed96da48..b267cb2e656b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -1730,8 +1730,8 @@ public class TermsAggregatorTests extends AggregatorTestCase { int ptr = 9; for (MultiBucketsAggregation.Bucket bucket : terms.getBuckets()) { InternalTopHits topHits = bucket.getAggregations().get("top_hits"); - assertThat(topHits.getHits().getTotalHits().value, equalTo((long) ptr)); - assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation); + assertThat(topHits.getHits().getTotalHits().value(), equalTo((long) ptr)); + assertEquals(TotalHits.Relation.EQUAL_TO, topHits.getHits().getTotalHits().relation()); if (withScore) { assertThat(topHits.getHits().getMaxScore(), equalTo(1f)); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 87eb2bdc29fb..07d535167b31 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -220,8 +220,8 @@ public class InternalTopHitsTests extends InternalAggregationTestCase size += between(1, 100); case 3 -> topDocs = new TopDocsAndMaxScore( new TopDocs( - new TotalHits(topDocs.topDocs.totalHits.value + between(1, 100), topDocs.topDocs.totalHits.relation), + new TotalHits(topDocs.topDocs.totalHits.value() + between(1, 100), topDocs.topDocs.totalHits.relation()), topDocs.topDocs.scoreDocs ), topDocs.maxScore + randomFloat() ); case 4 -> { TotalHits totalHits = new TotalHits( - searchHits.getTotalHits().value + between(1, 100), + searchHits.getTotalHits().value() + between(1, 100), randomFrom(TotalHits.Relation.values()) ); searchHits = SearchHits.unpooled(searchHits.getHits(), totalHits, searchHits.getMaxScore() + randomFloat()); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java index 0c5217ded982..6fb147e3ffc8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregatorTests.java @@ -55,7 +55,7 @@ public class TopHitsAggregatorTests extends AggregatorTestCase { result = testCase(query, topHits("_name")); } SearchHits searchHits = ((TopHits) result).getHits(); - assertEquals(3L, searchHits.getTotalHits().value); + assertEquals(3L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("2", searchHits.getAt(1).getId()); assertEquals("1", searchHits.getAt(2).getId()); @@ -65,7 +65,7 @@ public class TopHitsAggregatorTests extends AggregatorTestCase { public void testNoResults() throws Exception { TopHits result = (TopHits) testCase(new MatchNoDocsQuery(), topHits("_name").sort("string", SortOrder.DESC)); SearchHits searchHits = result.getHits(); - assertEquals(0L, searchHits.getTotalHits().value); + assertEquals(0L, searchHits.getTotalHits().value()); assertFalse(AggregationInspectionHelper.hasValue(((InternalTopHits) result))); } @@ -89,27 +89,27 @@ public class TopHitsAggregatorTests extends AggregatorTestCase { // The "a" bucket TopHits hits = (TopHits) terms.getBucketByKey("a").getAggregations().get("top"); SearchHits searchHits = (hits).getHits(); - assertEquals(2L, 
searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("a").getAggregations().get("top")))); // The "b" bucket searchHits = ((TopHits) terms.getBucketByKey("b").getAggregations().get("top")).getHits(); - assertEquals(2L, searchHits.getTotalHits().value); + assertEquals(2L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertEquals("1", searchHits.getAt(1).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("b").getAggregations().get("top")))); // The "c" bucket searchHits = ((TopHits) terms.getBucketByKey("c").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("2", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("c").getAggregations().get("top")))); // The "d" bucket searchHits = ((TopHits) terms.getBucketByKey("d").getAggregations().get("top")).getHits(); - assertEquals(1L, searchHits.getTotalHits().value); + assertEquals(1L, searchHits.getTotalHits().value()); assertEquals("3", searchHits.getAt(0).getId()); assertTrue(AggregationInspectionHelper.hasValue(((InternalTopHits) terms.getBucketByKey("d").getAggregations().get("top")))); } @@ -179,7 +179,7 @@ public class TopHitsAggregatorTests extends AggregatorTestCase { .build(); AggregationBuilder agg = AggregationBuilders.topHits("top_hits"); TopHits result = searchAndReduce(reader, new AggTestConfig(agg, STRING_FIELD_TYPE).withQuery(query)); - assertEquals(3, result.getHits().getTotalHits().value); + assertEquals(3, result.getHits().getTotalHits().value()); reader.close(); directory.close(); } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index e47614145e92..7fa2732191cd 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -88,12 +88,9 @@ public class IncludeExcludeTests extends ESTestCase { @Override public long nextOrd() { - if (consumed) { - return SortedSetDocValues.NO_MORE_ORDS; - } else { - consumed = true; - return 0; - } + assert consumed == false; + consumed = true; + return 0; } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index bc6b72d9ddd3..2a36887cc459 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ -124,11 +124,8 @@ public class MissingValuesTests extends ESTestCase { @Override public long nextOrd() { - if (i < ords[doc].length) { - return ords[doc][i++]; - } else { - return NO_MORE_ORDS; - } + assert i < ords[doc].length; + return ords[doc][i++]; } @Override @@ -153,10 +150,8 @@ public class MissingValuesTests extends ESTestCase { for (int ord : ords[i]) { assertEquals(values[ord], 
withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java index caf8df55ce52..71fd3a4761cb 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/TimeSeriesIndexSearcherTests.java @@ -201,7 +201,10 @@ public class TimeSeriesIndexSearcherTests extends ESTestCase { BucketCollector collector = getBucketCollector(2 * DOC_COUNTS); // skip the first doc of segment 1 and 2 - indexSearcher.search(SortedSetDocValuesField.newSlowSetQuery("_tsid", new BytesRef("tsid0"), new BytesRef("tsid1")), collector); + indexSearcher.search( + SortedSetDocValuesField.newSlowSetQuery("_tsid", List.of(new BytesRef("tsid0"), new BytesRef("tsid1"))), + collector + ); collector.postCollection(); reader.close(); diff --git a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java index 34ee0eec101b..9957d8c92b95 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/internal/ContextIndexSearcherTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.BoostQuery; -import org.apache.lucene.search.BulkScorer; import org.apache.lucene.search.Collector; import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.ConstantScoreQuery; @@ -47,6 +46,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TotalHitCountCollectorManager; @@ -76,7 +76,6 @@ import org.elasticsearch.test.IndexSettingsModule; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.IdentityHashMap; @@ -251,7 +250,7 @@ public class ContextIndexSearcherTests extends ESTestCase { Integer.MAX_VALUE, 1 ); - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertEquals(numDocs, totalHits.intValue()); int numExpectedTasks = ContextIndexSearcher.computeSlices(searcher.getIndexReader().leaves(), Integer.MAX_VALUE, 1).length; // check that each slice except for one that executes on the calling thread goes to the executor, no matter the queue size @@ -367,7 +366,7 @@ public class ContextIndexSearcherTests extends ESTestCase { assertEquals(1, searcher.count(new CreateScorerOnceQuery(new MatchAllDocsQuery()))); TopDocs topDocs = searcher.search(new 
BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("foo", "bar"))), 3f), 1); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(1, topDocs.scoreDocs.length); assertEquals(3f, topDocs.scoreDocs[0].score, 0); @@ -406,7 +405,7 @@ public class ContextIndexSearcherTests extends ESTestCase { int sumDocs = 0; assertThat(slices.length, lessThanOrEqualTo(numThreads)); for (LeafSlice slice : slices) { - int sliceDocs = Arrays.stream(slice.leaves).mapToInt(l -> l.reader().maxDoc()).sum(); + int sliceDocs = slice.getMaxDocs(); assertThat(sliceDocs, greaterThanOrEqualTo((int) (0.1 * numDocs))); sumDocs += sliceDocs; } @@ -497,9 +496,14 @@ public class ContextIndexSearcherTests extends ESTestCase { } return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { contextIndexSearcher.throwTimeExceededException(); - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + Scorer scorer = new ConstantScoreScorer( + score(), + scoreMode, + DocIdSetIterator.all(context.reader().maxDoc()) + ); + return new DefaultScorerSupplier(scorer); } @Override @@ -583,7 +587,10 @@ public class ContextIndexSearcherTests extends ESTestCase { return null; } }; - Integer hitCount = contextIndexSearcher.search(testQuery, new TotalHitCountCollectorManager()); + Integer hitCount = contextIndexSearcher.search( + testQuery, + new TotalHitCountCollectorManager(contextIndexSearcher.getSlices()) + ); assertEquals(0, hitCount.intValue()); assertTrue(contextIndexSearcher.timeExceeded()); } finally { @@ -747,15 +754,9 @@ public class ContextIndexSearcherTests extends ESTestCase { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.scorer(context); - } - - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - assertTrue(seenLeaves.add(context.reader().getCoreCacheHelper().getKey())); - return weight.bulkScorer(context); + return weight.scorerSupplier(context); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java index c42f3156c6d2..b728d4090057 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileCollectorManagerTests.java @@ -125,14 +125,14 @@ public class ProfileCollectorManagerTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(10, null, 1000); String profileReason = "profiler_reason"; ProfileCollectorManager profileCollectorManager = new ProfileCollectorManager<>(topDocsManager, profileReason); TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), profileCollectorManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, 
topDocs.totalHits.value()); CollectorResult result = profileCollectorManager.getCollectorTree(); assertEquals("profiler_reason", result.getReason()); assertEquals("SimpleTopScoreDocCollector", result.getName()); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java index e868293ef4a1..98d79df63db8 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/ProfileScorerTests.java @@ -10,14 +10,12 @@ package org.elasticsearch.search.profile.query; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.MultiReader; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Matches; import org.apache.lucene.search.MatchesIterator; import org.apache.lucene.search.Query; -import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; @@ -34,10 +32,6 @@ public class ProfileScorerTests extends ESTestCase { public float maxScore, minCompetitiveScore; - protected FakeScorer(Weight weight) { - super(weight); - } - @Override public DocIdSetIterator iterator() { throw new UnsupportedOperationException(); @@ -75,22 +69,14 @@ public class ProfileScorerTests extends ESTestCase { return Explanation.match(1, "fake_description"); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - FakeScorer fakeScorer = new FakeScorer(this); - fakeScorer.maxScore = 42f; - return fakeScorer; - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) { - Weight weight = this; return new ScorerSupplier() { private long cost = 0; @Override public Scorer get(long leadCost) { - return new Scorer(weight) { + return new Scorer() { @Override public DocIdSetIterator iterator() { return null; @@ -187,23 +173,17 @@ public class ProfileScorerTests extends ESTestCase { } public void testPropagateMinCompetitiveScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); assertEquals(0.42f, fakeScorer.minCompetitiveScore, 0f); } public void testPropagateMaxScore() throws IOException { - Query query = new MatchAllDocsQuery(); - Weight weight = query.createWeight(newSearcher(new MultiReader()), ScoreMode.TOP_SCORES, 1f); - FakeScorer fakeScorer = new FakeScorer(weight); + FakeScorer fakeScorer = new FakeScorer(); QueryProfileBreakdown profile = new QueryProfileBreakdown(); - ProfileWeight profileWeight = new ProfileWeight(query, weight, profile); - ProfileScorer profileScorer = new ProfileScorer(profileWeight, fakeScorer, profile); + ProfileScorer profileScorer = new ProfileScorer(fakeScorer, profile); profileScorer.setMinCompetitiveScore(0.42f); fakeScorer.maxScore = 42f; 
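// Not part of this patch: the changes just above follow two related Lucene 10 moves that recur in this
// file and in ContextIndexSearcherTests/QueryProfilerTests: Scorer subclasses are constructed without a
// Weight (and Scorable no longer declares docID()), and Weight implementations override scorerSupplier
// instead of scorer/bulkScorer. A minimal sketch, with names here being illustrative only:
//   Scorer scorer = new Scorer() { ... };                       // no Weight constructor argument any more
//   public ScorerSupplier scorerSupplier(LeafReaderContext context) {
//       return new DefaultScorerSupplier(scorer);               // wraps a ready-made Scorer, as done above
//   }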
assertEquals(42f, profileScorer.getMaxScore(DocIdSetIterator.NO_MORE_DOCS), 0f); diff --git a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java index 9df03905f7be..44c46e3f692b 100644 --- a/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java +++ b/server/src/test/java/org/elasticsearch/search/profile/query/QueryProfilerTests.java @@ -241,11 +241,6 @@ public class QueryProfilerTests extends ESTestCase { throw new UnsupportedOperationException(); } - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - throw new UnsupportedOperationException(); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { return new ScorerSupplier() { diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index d33c033ecef2..de6218e91295 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -120,7 +120,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topScoreDocManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -133,7 +133,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } } @@ -150,7 +150,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -165,7 +165,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new TermQuery(new Term("field2", "value")), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numField2Docs, result.aggs.intValue()); } } @@ -184,7 +184,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -199,7 +199,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } } @@ -218,7 +218,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new 
MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -235,7 +235,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(new MatchAllDocsQuery(), manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // post_filter is not applied to aggs assertEquals(reader.maxDoc(), result.aggs.intValue()); } @@ -251,7 +251,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -266,7 +266,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -279,7 +279,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -292,7 +292,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -306,7 +306,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } @@ -322,7 +322,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); // min_score is applied to aggs as well as top docs assertEquals(numField2Docs, result.aggs.intValue()); } @@ -338,7 +338,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numDocs, result.topDocs.totalHits.value); + assertEquals(numDocs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -353,7 +353,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, 
result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -370,7 +370,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -385,7 +385,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -398,7 +398,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); } { CollectorManager topDocsManager = new TopScoreDocCollectorManager(1, null, 1000); @@ -411,7 +411,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); } } @@ -427,7 +427,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } @@ -443,7 +443,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value); + assertEquals(numField2AndField3Docs, result.topDocs.totalHits.value()); assertEquals(numField3Docs, result.aggs.intValue()); } { @@ -458,7 +458,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(numField2Docs, result.topDocs.totalHits.value); + assertEquals(numField2Docs, result.topDocs.totalHits.value()); assertEquals(numDocs, result.aggs.intValue()); } { @@ -473,7 +473,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertFalse(result.terminatedAfter); - assertEquals(0, result.topDocs.totalHits.value); + assertEquals(0, result.topDocs.totalHits.value()); assertEquals(0, result.aggs.intValue()); } } @@ -623,7 +623,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -638,7 
+638,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -651,7 +651,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField2Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -667,7 +667,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); assertEquals(terminateAfter, result.aggs.intValue()); } } @@ -683,7 +683,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -698,7 +698,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); } } @@ -713,7 +713,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { { CollectorManager topDocsManager = new TopScoreDocCollectorManager(numField3Docs + 1, null, 1000); TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); - assertEquals(numDocs, topDocs.totalHits.value); + assertEquals(numDocs, topDocs.totalHits.value()); maxScore = topDocs.scoreDocs[0].score; } { @@ -729,7 +729,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { ); Result result = searcher.search(booleanQuery, manager); assertTrue(result.terminatedAfter); - assertEquals(terminateAfter, result.topDocs.totalHits.value); + assertEquals(terminateAfter, result.topDocs.totalHits.value()); // aggs see more documents because the filter is not applied to them assertThat(result.aggs, Matchers.greaterThanOrEqualTo(terminateAfter)); } @@ -1139,11 +1139,6 @@ public class QueryPhaseCollectorTests extends ESTestCase { public float score() { return 0; } - - @Override - public int docID() { - return 0; - } }; QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( @@ -1473,11 +1468,6 @@ public class QueryPhaseCollectorTests extends ESTestCase { return 0; } - @Override - public int docID() { - return 0; - } - @Override public void setMinCompetitiveScore(float minScore) { setMinCompetitiveScoreCalled = true; @@ -1521,7 +1511,7 @@ public class QueryPhaseCollectorTests extends ESTestCase { setScorerCalled = true; if (expectedScorable != null) { while (expectedScorable.equals(scorer.getClass()) == false && scorer instanceof FilterScorable) { - scorer = scorer.getChildren().iterator().next().child; + scorer = scorer.getChildren().iterator().next().child(); } assertEquals(expectedScorable, scorer.getClass()); } diff --git 
a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 56a8b0f3a8c3..1f74668158e0 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -144,7 +144,7 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.addCollectorsAndSearch(context); ContextIndexSearcher countSearcher = shouldCollectCount ? newContextSearcher(reader) : noCollectionContextSearcher(reader); - assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(countSearcher.count(query), context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -233,15 +233,15 @@ public class QueryPhaseTests extends IndexShardTestCase { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when a post_filter is provided, hence it forces collection despite @@ -249,16 +249,16 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(0); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.parsedPostFilter(new ParsedQuery(new MatchNoDocsQuery())); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, 
context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -269,8 +269,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(10); context.parsedPostFilter(new ParsedQuery(new TermQuery(new Term("foo", "bar")))); QueryPhase.addCollectorsAndSearch(context); - assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(1, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } } @@ -280,15 +280,15 @@ public class QueryPhaseTests extends IndexShardTestCase { try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 10), new MatchAllDocsQuery())) { // shortcutTotalHitCount makes us not track total hits as part of the top docs collection, hence size is the threshold context.setSize(10); QueryPhase.addCollectorsAndSearch(context); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // QueryPhaseCollector does not propagate Weight#count when min_score is provided, hence it forces collection despite @@ -296,16 +296,16 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(0); context.minimumScore(100); QueryPhase.addCollectorsAndSearch(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } try (TestSearchContext context = createContext(newContextSearcher(reader), new MatchAllDocsQuery())) { // shortcutTotalHitCount is disabled for filter collectors, hence we collect until track_total_hits context.setSize(10); context.minimumScore(100); QueryPhase.executeQuery(context); - assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertEquals(0, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); } } @@ -336,17 +336,17 @@ public class QueryPhaseTests extends 
IndexShardTestCase { context.setSize(size); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); context.setSearcher(earlyTerminationContextSearcher(reader, size)); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); - assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); + assertEquals(TotalHits.Relation.EQUAL_TO, context.queryResult().topDocs().topDocs.totalHits.relation()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0].doc, greaterThanOrEqualTo(size)); } } @@ -364,8 +364,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(0); QueryPhase.addCollectorsAndSearch(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -375,8 +375,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } try (TestSearchContext context = createContext(noCollectionContextSearcher(reader), new MatchAllDocsQuery())) { @@ -387,8 +387,8 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); // Given that total hit count does not require collection, PartialHitCountCollector does not early terminate. 
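// Not part of this patch: the repeated assertion updates in this file reflect the Lucene 10 change of
// TotalHits from public fields to accessor methods. For a hypothetical `topDocs`:
//   long hits = topDocs.totalHits.value();                      // was topDocs.totalHits.value
//   TotalHits.Relation rel = topDocs.totalHits.relation();      // was topDocs.totalHits.relation
// The same value()/relation() accessors are what the getTotalHits() assertions above now call.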
- assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -407,8 +407,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(0); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } // test interaction between trackTotalHits and terminateAfter @@ -419,8 +419,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } { @@ -434,9 +434,9 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(trackTotalHits); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) trackTotalHits)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) trackTotalHits)); assertThat( - context.queryResult().topDocs().topDocs.totalHits.relation, + context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); @@ -449,8 +449,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(randomIntBetween(11, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(10L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(10L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(0)); } } @@ -468,7 +468,7 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(10); QueryPhase.executeQuery(context); assertFalse(context.queryResult().terminatedEarly()); - 
assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(10)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 1), new MatchAllDocsQuery())) { @@ -477,8 +477,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -489,8 +489,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), new MatchAllDocsQuery())) { @@ -500,8 +500,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } { @@ -515,8 +515,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(randomIntBetween(1, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(size)); } } @@ -535,8 +535,8 @@ public class QueryPhaseTests extends 
IndexShardTestCase { context.setSize(1); QueryPhase.addCollectorsAndSearch(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(1L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); } // test interaction between trackTotalHits and terminateAfter @@ -546,8 +546,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(-1); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(0L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(0L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -558,8 +558,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(randomIntBetween(1, 6)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -572,8 +572,8 @@ public class QueryPhaseTests extends IndexShardTestCase { // depending on docs distribution we may or may not be able to honor terminate_after: low scoring hits are skipped via // setMinCompetitiveScore, which bypasses terminate_after until the next leaf collector is pulled, when that happens. 
assertThat(context.queryResult().terminatedEarly(), either(is(true)).or(is(false))); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(5)); } try (TestSearchContext context = createContext(earlyTerminationContextSearcher(reader, 7), query)) { @@ -584,9 +584,9 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(randomIntBetween(8, Integer.MAX_VALUE)); QueryPhase.executeQuery(context); assertTrue(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(7L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(7L)); // TODO this looks off, it should probably be GREATER_THAN_OR_EQUAL_TO - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(7)); } } @@ -599,8 +599,8 @@ public class QueryPhaseTests extends IndexShardTestCase { context.setSize(1); context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(context.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -612,7 +612,7 @@ public class QueryPhaseTests extends IndexShardTestCase { context.parsedPostFilter(new ParsedQuery(new MinDocQuery(1))); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo(numDocs - 1L)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo(numDocs - 1L)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -623,7 +623,7 @@ public class QueryPhaseTests extends IndexShardTestCase { context.sort(new SortAndFormats(sort, new DocValueFormat[] { DocValueFormat.RAW })); QueryPhase.executeQuery(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); 
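// Editor's note (illustrative sketch, not part of the patch): the recurring rewrite in the
// QueryPhaseTests hunks above and below is that Lucene 10's TotalHits exposes value() and
// relation() as accessor methods where the 9.x code read public fields. A minimal,
// self-contained example of the new calling convention; the class and method names here are
// hypothetical and exist only for illustration.
import org.apache.lucene.search.TotalHits;

class TotalHitsAccessorSketch {
    /** Renders a hit count the way these assertions consume it, e.g. "7" or "7+". */
    static String describe(TotalHits totalHits) {
        long count = totalHits.value();                                      // was: totalHits.value
        boolean exact = totalHits.relation() == TotalHits.Relation.EQUAL_TO; // was: totalHits.relation
        return count + (exact ? "" : "+");
    }
}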
assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(1)); assertThat(context.queryResult().topDocs().topDocs.scoreDocs[0], instanceOf(FieldDoc.class)); FieldDoc fieldDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; @@ -686,18 +686,18 @@ public class QueryPhaseTests extends IndexShardTestCase { context.sort(searchSortAndFormat); QueryPhase.addCollectorsAndSearch(context); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertNull(context.queryResult().terminatedEarly()); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); int sizeMinus1 = context.queryResult().topDocs().topDocs.scoreDocs.length - 1; FieldDoc lastDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[sizeMinus1]; context.setSearcher(earlyTerminationContextSearcher(reader, 10)); QueryPhase.addCollectorsAndSearch(context); assertNull(context.queryResult().terminatedEarly()); - assertThat(context.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); + assertThat(context.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); assertThat(context.terminateAfter(), equalTo(0)); - assertThat(context.queryResult().getTotalHits().value, equalTo((long) numDocs)); + assertThat(context.queryResult().getTotalHits().value(), equalTo((long) numDocs)); FieldDoc firstDoc = (FieldDoc) context.queryResult().topDocs().topDocs.scoreDocs[0]; for (int i = 0; i < searchSortAndFormat.sort.getSort().length; i++) { @SuppressWarnings("unchecked") @@ -746,8 +746,8 @@ public class QueryPhaseTests extends IndexShardTestCase { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.COMPLETE); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.EQUAL_TO); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.EQUAL_TO); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); } try (TestSearchContext context = createContext(newContextSearcher(reader), q)) { @@ -764,9 +764,9 @@ public class QueryPhaseTests extends IndexShardTestCase { ); assertEquals(collectorManager.newCollector().scoreMode(), org.apache.lucene.search.ScoreMode.TOP_DOCS); QueryPhase.executeQuery(context); - assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(5, context.queryResult().topDocs().topDocs.totalHits.value()); assertThat(context.queryResult().topDocs().topDocs.scoreDocs.length, equalTo(3)); - assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); + assertEquals(context.queryResult().topDocs().topDocs.totalHits.relation(), TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO); } } @@ -881,8 +881,8 @@ public class QueryPhaseTests extends IndexShardTestCase { QueryPhase.addCollectorsAndSearch(searchContext); assertTrue(searchContext.sort().sort.getSort()[0].getOptimizeSortWithPoints()); assertThat(searchContext.queryResult().topDocs().topDocs.scoreDocs, 
arrayWithSize(0)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value, equalTo((long) numDocs)); - assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.value(), equalTo((long) numDocs)); + assertThat(searchContext.queryResult().topDocs().topDocs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO)); } // 7. Test that sort optimization doesn't break a case where from = 0 and size= 0 @@ -950,8 +950,8 @@ public class QueryPhaseTests extends IndexShardTestCase { // assert score docs are in order and their number is as expected private static void assertSortResults(TopDocs topDocs, long totalNumDocs, boolean isDoubleSort) { - assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation); - assertThat(topDocs.totalHits.value, lessThan(totalNumDocs)); // we collected less docs than total number + assertEquals(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO, topDocs.totalHits.relation()); + assertThat(topDocs.totalHits.value(), lessThan(totalNumDocs)); // we collected less docs than total number long cur1, cur2; long prev1 = Long.MIN_VALUE; long prev2 = Long.MIN_VALUE; @@ -990,7 +990,7 @@ public class QueryPhaseTests extends IndexShardTestCase { context.trackTotalHitsUpTo(5); QueryPhase.addCollectorsAndSearch(context); - assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(10, context.queryResult().topDocs().topDocs.totalHits.value()); } } @@ -1136,7 +1136,7 @@ public class QueryPhaseTests extends IndexShardTestCase { ) { @Override - public void search(List leaves, Weight weight, Collector collector) throws IOException { + public void search(LeafReaderContextPartition[] partitions, Weight weight, Collector collector) throws IOException { final Collector in = new FilterCollector(collector) { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { @@ -1153,7 +1153,7 @@ public class QueryPhaseTests extends IndexShardTestCase { }; } }; - super.search(leaves, weight, in); + super.search(partitions, weight, in); } }; } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java index 3bf9514cad54..b417f7adbc8b 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTimeoutTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.Scorable; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.Weight; import org.apache.lucene.store.Directory; import org.apache.lucene.tests.index.RandomIndexWriter; @@ -132,7 +133,7 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { try (SearchContext context = createSearchContext(query, size)) { QueryPhase.executeQuery(context); assertFalse(context.queryResult().searchTimedOut()); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); assertEquals(size, context.queryResult().topDocs().topDocs.scoreDocs.length); } } @@ -142,7 +143,7 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { 
QueryPhase.executeQuery(context); assertTrue(context.queryResult().searchTimedOut()); int firstSegmentMaxDoc = reader.leaves().get(0).reader().maxDoc(); - assertEquals(Math.min(2048, firstSegmentMaxDoc), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(Math.min(2048, firstSegmentMaxDoc), context.queryResult().topDocs().topDocs.totalHits.value()); assertEquals(Math.min(size, firstSegmentMaxDoc), context.queryResult().topDocs().topDocs.scoreDocs.length); } } @@ -159,14 +160,14 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { boolean firstSegment = true; @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { if (firstSegment == false && isTimeoutExpected) { shouldTimeout = true; } timeoutTrigger.accept(context); assert shouldTimeout == false : "should have already timed out"; firstSegment = false; - return super.scorer(context); + return super.scorerSupplier(context); } }; } @@ -180,7 +181,7 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { try (SearchContext context = createSearchContext(query, size)) { QueryPhase.executeQuery(context); assertFalse(context.queryResult().searchTimedOut()); - assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(numDocs, context.queryResult().topDocs().topDocs.totalHits.value()); assertEquals(size, context.queryResult().topDocs().topDocs.scoreDocs.length); } } @@ -190,7 +191,7 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { QueryPhase.executeQuery(context); assertTrue(context.queryResult().searchTimedOut()); int firstSegmentMaxDoc = reader.leaves().get(0).reader().maxDoc(); - assertEquals(Math.min(2048, firstSegmentMaxDoc), context.queryResult().topDocs().topDocs.totalHits.value); + assertEquals(Math.min(2048, firstSegmentMaxDoc), context.queryResult().topDocs().topDocs.totalHits.value()); assertEquals(Math.min(size, firstSegmentMaxDoc), context.queryResult().topDocs().topDocs.scoreDocs.length); } } @@ -202,33 +203,49 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new MatchAllWeight(this, boost, scoreMode) { @Override - public BulkScorer bulkScorer(LeafReaderContext context) { - final float score = score(); - final int maxDoc = context.reader().maxDoc(); - return new BulkScorer() { + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier inScorerSupplier = super.scorerSupplier(context); + return new ScorerSupplier() { @Override - public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException { - assert shouldTimeout == false : "should have already timed out"; - max = Math.min(max, maxDoc); - ScoreAndDoc scorer = new ScoreAndDoc(); - scorer.score = score; - collector.setScorer(scorer); - for (int doc = min; doc < max; ++doc) { - scorer.doc = doc; - if (acceptDocs == null || acceptDocs.get(doc)) { - collector.collect(doc); - } - } - if (timeoutExpected) { - // timeout after collecting the first batch of documents from the 1st segment, or the entire 1st segment - shouldTimeout = true; - } - return max == maxDoc ? 
DocIdSetIterator.NO_MORE_DOCS : max; + public Scorer get(long leadCost) throws IOException { + return inScorerSupplier.get(leadCost); } @Override public long cost() { - return 0; + return inScorerSupplier.cost(); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final float score = score(); + final int maxDoc = context.reader().maxDoc(); + return new BulkScorer() { + @Override + public int score(LeafCollector collector, Bits acceptDocs, int min, int max) throws IOException { + assert shouldTimeout == false : "should have already timed out"; + max = Math.min(max, maxDoc); + Score scorer = new Score(); + scorer.score = score; + collector.setScorer(scorer); + for (int doc = min; doc < max; ++doc) { + if (acceptDocs == null || acceptDocs.get(doc)) { + collector.collect(doc); + } + } + if (timeoutExpected) { + // timeout after collecting the first batch of documents from the 1st segment, or the entire 1st + // segment + shouldTimeout = true; + } + return max == maxDoc ? DocIdSetIterator.NO_MORE_DOCS : max; + } + + @Override + public long cost() { + return 0; + } + }; } }; } @@ -258,14 +275,8 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { return context; } - private static class ScoreAndDoc extends Scorable { + private static class Score extends Scorable { float score; - int doc = -1; - - @Override - public int docID() { - return doc; - } @Override public float score() { @@ -315,8 +326,9 @@ public class QueryPhaseTimeoutTests extends IndexShardTestCase { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new ConstantScoreScorer(this, score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { + Scorer scorer = new ConstantScoreScorer(score(), scoreMode, DocIdSetIterator.all(context.reader().maxDoc())); + return new DefaultScorerSupplier(scorer); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java index b295b78453f9..e8f88f3297b7 100644 --- a/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/retriever/rankdoc/RankDocsQueryBuilderTests.java @@ -160,7 +160,7 @@ public class RankDocsQueryBuilderTests extends AbstractQueryTestCase context = mapping.parseContext(document); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 320e3fce2e83..7791073ef36f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -211,8 +211,8 @@ public class DirectCandidateGeneratorTests extends ESTestCase { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow 
assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); @@ -227,8 +227,8 @@ public class DirectCandidateGeneratorTests extends ESTestCase { ) ); assertThat(candidateSet.candidates.length, equalTo(1)); - assertThat(candidateSet.candidates[0].termStats.docFreq, equalTo(numDocs - 1)); - assertThat(candidateSet.candidates[0].termStats.totalTermFreq, equalTo((long) numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.docFreq(), equalTo(numDocs - 1)); + assertThat(candidateSet.candidates[0].termStats.totalTermFreq(), equalTo((long) numDocs - 1)); // test that it doesn't overflow assertThat(generator.thresholdTermFrequency(Integer.MAX_VALUE), equalTo(Integer.MAX_VALUE)); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java index dc9d026af013..72ae45fd2614 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/AbstractDenseVectorQueryTestCase.java @@ -226,7 +226,7 @@ abstract class AbstractDenseVectorQueryTestCase extends ESTestCase { int n = random().nextInt(100) + 1; TopDocs results = searcher.search(query, n); assert reader.hasDeletions() == false; - assertTrue(results.totalHits.value >= results.scoreDocs.length); + assertTrue(results.totalHits.value() >= results.scoreDocs.length); // verify the results are in descending score order float last = Float.MAX_VALUE; for (ScoreDoc scoreDoc : results.scoreDocs) { diff --git a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java index 18d5c8c85fbe..bef0bbfd27ff 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/KnnScoreDocQueryBuilderTests.java @@ -282,8 +282,8 @@ public class KnnScoreDocQueryBuilderTests extends AbstractQueryTestCase scoreDoc.doc)); assertEquals(scoreDocs.length, topDocs.scoreDocs.length); diff --git a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java index 8d9fa847a988..f2ead93ebb6e 100644 --- a/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java +++ b/server/src/test/java/org/elasticsearch/search/vectors/VectorSimilarityQueryTests.java @@ -62,14 +62,14 @@ public class VectorSimilarityQueryTests extends ESTestCase { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 3f, 0.25f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), 1f, 0.5f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } @@ -138,14 +138,14 @@ public class VectorSimilarityQueryTests extends ESTestCase { new VectorSimilarityQuery(new KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .8f, .9f), 5 ); - assertThat(docs.totalHits.value, equalTo(5L)); + assertThat(docs.totalHits.value(), equalTo(5L)); // Should match only 4 docs = searcher.search( new VectorSimilarityQuery(new 
KnnFloatVectorQuery("float_vector", new float[] { 1, 1, 1 }, 5), .9f, 0.95f), 5 ); - assertThat(docs.totalHits.value, equalTo(4L)); + assertThat(docs.totalHits.value(), equalTo(4L)); } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index f8c3edcbb9d4..c46d98fe1cd8 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -372,7 +372,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseListener, r -> { - assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value); + assertEquals(documents, Objects.requireNonNull(r.getHits().getTotalHits()).value()); documentCountVerified.set(true); }); @@ -816,7 +816,10 @@ public class SnapshotResiliencyTests extends ESTestCase { var response = safeResult(searchResponseListener); try { - assertEquals(documentsFirstSnapshot + documentsSecondSnapshot, Objects.requireNonNull(response.getHits().getTotalHits()).value); + assertEquals( + documentsFirstSnapshot + documentsSecondSnapshot, + Objects.requireNonNull(response.getHits().getTotalHits()).value() + ); } finally { response.decRef(); } @@ -1177,7 +1180,7 @@ public class SnapshotResiliencyTests extends ESTestCase { final AtomicBoolean documentCountVerified = new AtomicBoolean(); continueOrDie(searchResponseStepListener, r -> { - final long hitCount = r.getHits().getTotalHits().value; + final long hitCount = r.getHits().getTotalHits().value(); assertThat( "Documents were restored but the restored index mapping was older than some documents and misses some of their fields", (int) hitCount, diff --git a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java index 9e5b9dd0be54..7f2cb85919d1 100644 --- a/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java +++ b/test/external-modules/latency-simulating-directory/src/internalClusterTest/java/org/elasticsearch/test/simulatedlatencyrepo/LatencySimulatingBlobStoreRepositoryTests.java @@ -140,7 +140,7 @@ public class LatencySimulatingBlobStoreRepositoryTests extends AbstractSnapshotI logger.info("--> run a search"); assertResponse(client.prepareSearch("test-idx").setQuery(QueryBuilders.termQuery("text", "sometext")), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertThat(COUNTS.intValue(), greaterThan(0)); }); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index 0b5803e9887d..4713adf6cf01 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -178,7 +178,7 @@ public abstract class EngineTestCase extends ESTestCase { 
engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -970,7 +970,7 @@ public abstract class EngineTestCase extends ESTestCase { engine.refresh("test"); } try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(searcher.getSlices())); assertThat(totalHits, equalTo(numDocs)); } } @@ -1168,7 +1168,10 @@ public abstract class EngineTestCase extends ESTestCase { assertVisibleCount(replicaEngine, lastFieldValue == null ? 0 : 1); if (lastFieldValue != null) { try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { - Integer totalHits = searcher.search(new TermQuery(new Term("value", lastFieldValue)), new TotalHitCountCollectorManager()); + Integer totalHits = searcher.search( + new TermQuery(new Term("value", lastFieldValue)), + new TotalHitCountCollectorManager(searcher.getSlices()) + ); assertThat(totalHits, equalTo(1)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java index 98f18829966c..47a227cebc95 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/AbstractScriptFieldTypeTestCase.java @@ -502,7 +502,7 @@ public abstract class AbstractScriptFieldTypeTestCase extends MapperServiceTestC } protected final String readSource(IndexReader reader, int docId) throws IOException { - return reader.document(docId).getBinaryValue("_source").utf8ToString(); + return reader.storedFields().document(docId).getBinaryValue("_source").utf8ToString(); } protected final void checkExpensiveQuery(BiConsumer queryBuilder) { diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java index b6aa3f97241e..1c4cfa4ec7ff 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/FieldTypeTestCase.java @@ -8,6 +8,7 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -139,6 +140,7 @@ public abstract class FieldTypeTestCase extends ESTestCase { randomBoolean(), IndexOptions.NONE, DocValuesType.NONE, + DocValuesSkipIndexType.NONE, -1, new HashMap<>(), 1, diff --git a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java index 2ddd153b8a93..c76967e5d00a 100644 --- a/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/indices/analysis/AnalysisFactoryTestCase.java @@ -198,7 +198,9 @@ public abstract 
class AnalysisFactoryTestCase extends ESTestCase { entry("daitchmokotoffsoundex", Void.class), entry("persianstem", Void.class), // not exposed - entry("word2vecsynonym", Void.class) + entry("word2vecsynonym", Void.class), + // not exposed + entry("romaniannormalization", Void.class) ); static final Map> KNOWN_CHARFILTERS = Map.of( diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index cc4aac686a02..df1ea6b75640 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -100,7 +100,7 @@ public enum SearchResponseUtils { } public static long getTotalHitsValue(SearchRequestBuilder request) { - return getTotalHits(request).value; + return getTotalHits(request).value(); } public static SearchResponse responseAsSearchResponse(Response searchResponse) throws IOException { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index d35d5282238e..5f64d123c1be 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -33,6 +33,7 @@ import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.sandbox.document.HalfFloatPoint; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.CollectorManager; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; @@ -158,6 +159,7 @@ import java.io.UncheckedIOException; import java.net.InetAddress; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -831,11 +833,8 @@ public abstract class AggregatorTestCase extends ESTestCase { QueryCachingPolicy queryCachingPolicy, MappedFieldType... fieldTypes ) throws IOException { - // Don't use searchAndReduce because we only want a single aggregator. - IndexSearcher searcher = newIndexSearcher( - reader, - aggregationBuilder.supportsParallelCollection(field -> getCardinality(reader, field)) - ); + // Don't use searchAndReduce because we only want a single aggregator, disable parallel collection too. 
+ IndexSearcher searcher = newIndexSearcher(reader, false); if (queryCachingPolicy != null) { searcher.setQueryCachingPolicy(queryCachingPolicy); } @@ -854,7 +853,21 @@ public abstract class AggregatorTestCase extends ESTestCase { try { Aggregator aggregator = createAggregator(builder, context); aggregator.preCollection(); - searcher.search(context.query(), aggregator.asCollector()); + searcher.search(context.query(), new CollectorManager() { + boolean called = false; + + @Override + public Collector newCollector() { + assert called == false : "newCollector called multiple times"; + called = true; + return aggregator.asCollector(); + } + + @Override + public Void reduce(Collection collectors) { + return null; + } + }); InternalAggregation r = aggregator.buildTopLevel(); r = doReduce( List.of(r), @@ -959,11 +972,11 @@ public abstract class AggregatorTestCase extends ESTestCase { } private static class ShardSearcher extends IndexSearcher { - private final List ctx; + private final LeafReaderContextPartition[] ctx; ShardSearcher(LeafReaderContext ctx, IndexReaderContext parent) { super(parent); - this.ctx = Collections.singletonList(ctx); + this.ctx = new LeafReaderContextPartition[] { IndexSearcher.LeafReaderContextPartition.createForEntireSegment(ctx) }; } public void search(Weight weight, Collector collector) throws IOException { @@ -972,7 +985,7 @@ public abstract class AggregatorTestCase extends ESTestCase { @Override public String toString() { - return "ShardSearcher(" + ctx.get(0) + ")"; + return "ShardSearcher(" + ctx[0] + ")"; } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java index b1b75c179028..29112b4bd8f5 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractGeoTestCase.java @@ -232,7 +232,7 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase { .setSize(5000), response -> { assertNoFailures(response); - long totalHits = response.getHits().getTotalHits().value; + long totalHits = response.getHits().getTotalHits().value(); XContentBuilder builder = XContentFactory.jsonBuilder(); ChunkedToXContent.wrapAsToXContent(response).toXContent(builder, ToXContent.EMPTY_PARAMS); logger.info("Full high_card_idx Response Content:\n{ {} }", Strings.toString(builder)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java index f191012fb4ef..f87a87c5ddbc 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/CentroidAggregationTestBase.java @@ -40,7 +40,7 @@ public abstract class CentroidAggregationTestBase extends AbstractGeoTestCase { .addAggregation(centroidAgg(aggName()).field(SINGLE_VALUED_FIELD_NAME)), response -> { CentroidAggregation geoCentroid = response.getAggregations().get(aggName()); - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(geoCentroid, notNullValue()); assertThat(geoCentroid.getName(), equalTo(aggName())); 
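// Editor's note (illustrative sketch, not part of the patch): the AggregatorTestCase hunk above
// now drives the search through IndexSearcher#search(Query, CollectorManager) and wraps the
// single Aggregator collector in a one-shot anonymous manager. The same idea as a standalone
// class, assuming the caller only ever needs one Collector instance; the class name is
// hypothetical.
import java.util.Collection;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.CollectorManager;

final class SingleCollectorManager implements CollectorManager<Collector, Void> {
    private final Collector collector;
    private boolean created;

    SingleCollectorManager(Collector collector) {
        this.collector = collector;
    }

    @Override
    public Collector newCollector() {
        // Mirror the anonymous manager in the hunk above: hand out the one collector exactly once.
        assert created == false : "newCollector called multiple times";
        created = true;
        return collector;
    }

    @Override
    public Void reduce(Collection<Collector> collectors) {
        return null; // nothing to merge; results are read back from the single collector
    }
}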
assertThat(geoCentroid.centroid(), equalTo(null)); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java index cb6a58ed65a0..a0fdb0bfabf9 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/metrics/SpatialBoundsAggregationTestBase.java @@ -155,7 +155,7 @@ public abstract class SpatialBoundsAggregationTestBase e assertNoFailuresAndResponse( client().prepareSearch(EMPTY_IDX_NAME).setQuery(matchAllQuery()).addAggregation(boundsAgg(aggName(), SINGLE_VALUED_FIELD_NAME)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); SpatialBounds geoBounds = response.getAggregations().get(aggName()); assertThat(geoBounds, notNullValue()); assertThat(geoBounds.getName(), equalTo(aggName())); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 71956b431d9b..fd41213dcd81 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -104,7 +104,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -113,7 +113,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -170,7 +170,7 @@ public abstract class BasePointShapeQueryTestCase { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(1L)); + assertThat(searchHits.getTotalHits().value(), equalTo(1L)); assertThat(searchHits.getAt(0).getId(), equalTo("1")); } ); @@ -209,7 +209,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -219,7 +219,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("2"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("2"))); @@ -229,7 +229,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); 
assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -238,7 +238,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(0L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(0L)); assertThat(response.getHits().getHits().length, equalTo(0)); } ); @@ -264,7 +264,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -319,7 +319,7 @@ public abstract class BasePointShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("point2")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java index 91c7a25682ae..6dca91170c7a 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BaseShapeQueryTestCase.java @@ -206,7 +206,7 @@ public abstract class BaseShapeQueryTestCase { - assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value > 0); + assertTrue("query: " + intersects + " doc: " + Strings.toString(docSource), response.getHits().getTotalHits().value() > 0); }); } @@ -352,7 +352,7 @@ public abstract class BaseShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("blakely")); } @@ -457,7 +457,7 @@ public abstract class BaseShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } @@ -466,7 +466,7 @@ public abstract class BaseShapeQueryTestCase { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("1")); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java index 4e7378d3a960..3cd52124d855 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/DatelinePointShapeQueryTestCase.java @@ -68,7 +68,7 @@ public class DatelinePointShapeQueryTestCase { GeoShapeQueryBuilder geoShapeQueryBuilder = QueryBuilders.geoShapeQuery(defaultFieldName, rectangle); assertResponse(tests.client().prepareSearch(defaultIndexName).setQuery(geoShapeQueryBuilder), response -> { SearchHits 
searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); }); @@ -112,7 +112,7 @@ public class DatelinePointShapeQueryTestCase { geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); assertResponse(tests.client().prepareSearch(defaultIndexName).setQuery(geoShapeQueryBuilder), response -> { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("1", searchHits.getAt(0).getId()); assertNotEquals("4", searchHits.getAt(0).getId()); assertNotEquals("1", searchHits.getAt(1).getId()); @@ -155,7 +155,7 @@ public class DatelinePointShapeQueryTestCase { geoShapeQueryBuilder.relation(ShapeRelation.INTERSECTS); assertResponse(tests.client().prepareSearch(defaultIndexName).setQuery(geoShapeQueryBuilder), response -> { SearchHits searchHits = response.getHits(); - assertEquals(2, searchHits.getTotalHits().value); + assertEquals(2, searchHits.getTotalHits().value()); assertNotEquals("3", searchHits.getAt(0).getId()); assertNotEquals("3", searchHits.getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java index c84d8612b1d4..97e21f64e264 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoBoundingBoxQueryIntegTestCase.java @@ -102,7 +102,7 @@ public abstract class GeoBoundingBoxQueryIntegTestCase extends ESIntegTestCase { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -114,7 +114,7 @@ public abstract class GeoBoundingBoxQueryIntegTestCase extends ESIntegTestCase { client().prepareSearch() // from NY .setQuery(geoBoundingBoxQuery("location").setCorners(40.73, -74.1, 40.717, -73.99)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("1"), equalTo("3"), equalTo("5"))); @@ -126,7 +126,7 @@ public abstract class GeoBoundingBoxQueryIntegTestCase extends ESIntegTestCase { client().prepareSearch() // top == bottom && left == right .setQuery(geoBoundingBoxQuery("location").setCorners(40.7143528, -74.0059731, 40.7143528, -74.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("1")); @@ -138,7 +138,7 @@ public abstract class GeoBoundingBoxQueryIntegTestCase extends ESIntegTestCase { client().prepareSearch() // top == bottom 
.setQuery(geoBoundingBoxQuery("location").setCorners(40.759011, -74.00009, 40.759011, -73.0059731)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -150,7 +150,7 @@ public abstract class GeoBoundingBoxQueryIntegTestCase extends ESIntegTestCase { client().prepareSearch() // left == right .setQuery(geoBoundingBoxQuery("location").setCorners(41.8, -73.9844722, 40.7, -73.9844722)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), equalTo("2")); @@ -163,7 +163,7 @@ public abstract class GeoBoundingBoxQueryIntegTestCase extends ESIntegTestCase { client().prepareSearch() // from NY .setQuery(geoDistanceQuery("location").point(40.5, -73.9).distance(25, DistanceUnit.KILOMETERS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); for (SearchHit hit : response.getHits()) { assertThat(hit.getId(), anyOf(equalTo("7"), equalTo("4"))); diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java index 4e47b0c51177..bb57cb132daa 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/GeoShapeQueryTestCase.java @@ -143,7 +143,7 @@ public abstract class GeoShapeQueryTestCase extends BaseShapeQueryTestCase { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertNotEquals("1", response.getHits().getAt(0).getId()); assertNotEquals("1", response.getHits().getAt(1).getId()); }); diff --git a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java index f1ff3492426a..a93f3b7eaf10 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/CorruptionUtils.java @@ -73,7 +73,7 @@ public final class CorruptionUtils { long checksumAfterCorruption; long actualChecksumAfterCorruption; - try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString(), IOContext.DEFAULT)) { + try (ChecksumIndexInput input = dir.openChecksumInput(fileToCorrupt.getFileName().toString())) { assertThat(input.getFilePointer(), is(0L)); input.seek(input.length() - CodecUtil.footerLength()); checksumAfterCorruption = input.getChecksum(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 87a834d6424b..d7c5c598ce97 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -908,9 +908,11 @@ public abstract class ESIntegTestCase extends ESTestCase { /** Ensures the result counts are as expected, 
and logs the results if different */ public void assertResultsAndLogOnFailure(long expectedResults, SearchResponse searchResponse) { final TotalHits totalHits = searchResponse.getHits().getTotalHits(); - if (totalHits.value != expectedResults || totalHits.relation != TotalHits.Relation.EQUAL_TO) { + if (totalHits.value() != expectedResults || totalHits.relation() != TotalHits.Relation.EQUAL_TO) { StringBuilder sb = new StringBuilder("search result contains ["); - String value = Long.toString(totalHits.value) + (totalHits.relation == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO ? "+" : ""); + String value = Long.toString(totalHits.value()) + (totalHits.relation() == TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO + ? "+" + : ""); sb.append(value).append("] results. expected [").append(expectedResults).append("]"); String failMsg = sb.toString(); for (SearchHit hit : searchResponse.getHits().getHits()) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 31c8e5bc3d45..e1ba661eb24d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -33,6 +33,8 @@ import org.apache.logging.log4j.core.layout.PatternLayout; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.tests.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.tests.util.TestRuleMarkFailure; @@ -208,6 +210,7 @@ import java.util.stream.IntStream; import java.util.stream.LongStream; import java.util.stream.Stream; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomBoolean; import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.anyOf; @@ -2637,4 +2640,43 @@ public abstract class ESTestCase extends LuceneTestCase { throw new AssertionError("Failed to verify search contexts", e); } } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r) { + return newSearcher(r, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap) { + return newSearcher(r, maybeWrap, true); + } + + /** + * Create a new searcher over the reader. This searcher might randomly use threads. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. 
+ */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions) { + return newSearcher(r, maybeWrap, wrapWithAssertions, randomBoolean()); + } + + /** + * Create a new searcher over the reader. + * Provides the same functionality as {@link LuceneTestCase#newSearcher(IndexReader, boolean, boolean, boolean)}, + * with the only difference that concurrency will only ever be inter-segment and never intra-segment. + */ + public static IndexSearcher newSearcher(IndexReader r, boolean maybeWrap, boolean wrapWithAssertions, boolean useThreads) { + if (useThreads) { + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.INTER_SEGMENT); + } + return newSearcher(r, maybeWrap, wrapWithAssertions, Concurrency.NONE); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index db30f6e91f03..42439b5d5785 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -86,13 +86,6 @@ public class ThrowingLeafReaderWrapper extends SequentialStoredFieldsLeafReader return terms; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields fields = super.getTermVectors(docID); - thrower.maybeThrow(Flags.TermVectors); - return fields == null ? null : new ThrowingFields(fields, thrower); - } - /** * Wraps a Fields but with additional asserts */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 552e301650d9..5851fc709d14 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/test/framework/src/main/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -314,7 +314,7 @@ public class ElasticsearchAssertions { public static void assertHitCount(SearchResponse countResponse, long expectedHitCount) { final TotalHits totalHits = countResponse.getHits().getTotalHits(); - if (totalHits.relation != TotalHits.Relation.EQUAL_TO || totalHits.value != expectedHitCount) { + if (totalHits.relation() != TotalHits.Relation.EQUAL_TO || totalHits.value() != expectedHitCount) { fail("Count is " + totalHits + " but " + expectedHitCount + " was expected. 
" + formatShardStatus(countResponse)); } } @@ -346,7 +346,7 @@ public class ElasticsearchAssertions { public static void assertSearchHit(SearchResponse searchResponse, int number, Matcher matcher) { assertThat("SearchHit number must be greater than 0", number, greaterThan(0)); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo((long) number)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo((long) number)); assertThat(searchResponse.getHits().getAt(number - 1), matcher); } @@ -409,13 +409,13 @@ public class ElasticsearchAssertions { responses.add(scrollResponse); int retrievedDocsCount = 0; try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); while (scrollResponse.getHits().getHits().length > 0) { scrollResponse = client.prepareSearchScroll(scrollResponse.getScrollId()).setScroll(keepAlive).get(); responses.add(scrollResponse); - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo((long) expectedTotalHitCount)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo((long) expectedTotalHitCount)); retrievedDocsCount += scrollResponse.getHits().getHits().length; responseConsumer.accept(responses.size(), scrollResponse); } @@ -704,8 +704,8 @@ public class ElasticsearchAssertions { assertThat(query, instanceOf(BooleanQuery.class)); BooleanQuery q = (BooleanQuery) query; assertThat(q.clauses(), hasSize(greaterThan(i))); - assertThat(q.clauses().get(i).getQuery(), instanceOf(subqueryType)); - return subqueryType.cast(q.clauses().get(i).getQuery()); + assertThat(q.clauses().get(i).query(), instanceOf(subqueryType)); + return subqueryType.cast(q.clauses().get(i).query()); } /** diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java index 885e02a8b5e6..f517c03468bc 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/TimeSeriesRateAggregatorTests.java @@ -69,8 +69,12 @@ public class TimeSeriesRateAggregatorTests extends AggregatorTestCase { }; AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")); testCase(iw -> { - iw.addDocuments(docs(1000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + for (Document document : docs(1000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(1000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } @@ -109,8 +113,12 @@ public class TimeSeriesRateAggregatorTests extends AggregatorTestCase { AggTestConfig aggTestConfig = new AggTestConfig(tsBuilder, timeStampField(), counterField("counter_field"), dimensionField("dim")) .withSplitLeavesIntoSeperateAggregators(false); testCase(iw -> { - iw.addDocuments(docs(2000, "1", 15, 37, 60, /*reset*/ 14)); - iw.addDocuments(docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)); + 
for (Document document : docs(2000, "1", 15, 37, 60, /*reset*/ 14)) { + iw.addDocument(document); + } + for (Document document : docs(2000, "2", 74, 150, /*reset*/ 50, 90, /*reset*/ 40)) { + iw.addDocument(document); + } }, verifier, aggTestConfig); } diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java index 20da254657c1..04f0563e433a 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/stringstats/StringStatsAggregatorTests.java @@ -170,7 +170,7 @@ public class StringStatsAggregatorTests extends AggregatorTestCase { } public void testQueryFiltering() throws IOException { - testAggregation(new TermInSetQuery("text", new BytesRef("test0"), new BytesRef("test1")), iw -> { + testAggregation(new TermInSetQuery("text", List.of(new BytesRef("test0"), new BytesRef("test1"))), iw -> { for (int i = 0; i < 10; i++) { iw.addDocument(singleton(new TextField("text", "test" + i, Field.Store.NO))); } diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java index 52ecc40c957b..aee344777779 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java @@ -314,9 +314,9 @@ public abstract class AsyncSearchIntegTestCase extends ESIntegTestCase { assertThat(newResponse.getSearchResponse().getShardFailures().length, equalTo(numFailures)); assertNull(newResponse.getSearchResponse().getAggregations()); assertNotNull(newResponse.getSearchResponse().getHits().getTotalHits()); - assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value, equalTo(0L)); + assertThat(newResponse.getSearchResponse().getHits().getTotalHits().value(), equalTo(0L)); assertThat( - newResponse.getSearchResponse().getHits().getTotalHits().relation, + newResponse.getSearchResponse().getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO) ); } else { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java index 302bb68af6c6..fd4463df07a7 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSingleNodeTests.java @@ -62,7 +62,7 @@ public class AsyncSearchSingleNodeTests extends ESSingleNodeTestCase { assertEquals(10, searchResponse.getSuccessfulShards()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(0, searchResponse.getShardFailures().length); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(0, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); @@ 
-106,7 +106,7 @@ public class AsyncSearchSingleNodeTests extends ESSingleNodeTestCase { assertEquals(10, searchResponse.getTotalShards()); assertEquals(5, searchResponse.getSuccessfulShards()); assertEquals(5, searchResponse.getFailedShards()); - assertEquals(10, searchResponse.getHits().getTotalHits().value); + assertEquals(10, searchResponse.getHits().getTotalHits().value()); assertEquals(5, searchResponse.getHits().getHits().length); StringTerms terms = searchResponse.getAggregations().get("text"); assertEquals(1, terms.getBuckets().size()); diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java index 95b2324d03b5..16645e7523c3 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/common/BlobCacheBufferedIndexInput.java @@ -390,12 +390,11 @@ public abstract class BlobCacheBufferedIndexInput extends IndexInput implements /** Returns default buffer sizes for the given {@link IOContext} */ public static int bufferSize(IOContext context) { - switch (context.context) { + switch (context.context()) { case MERGE: return MERGE_BUFFER_SIZE; case DEFAULT: case FLUSH: - case READ: default: return BUFFER_SIZE; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java index 6b390ab5747a..164e6ed5406a 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRestoreSourceService.java @@ -245,7 +245,7 @@ public class CcrRestoreSourceService extends AbstractLifecycleComponent implemen private long readFileBytes(String fileName, ByteArray reference) throws IOException { try (Releasable ignored = keyedLock.acquire(fileName)) { - var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? IOContext.READONCE : IOContext.READ; + var context = fileName.startsWith(IndexFileNames.SEGMENTS) ? 
IOContext.READONCE : IOContext.DEFAULT; final IndexInput indexInput = cachedInputs.computeIfAbsent(fileName, f -> { try { return commitRef.getIndexCommit().getDirectory().openInput(fileName, context); diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 1b7875e4a36b..618489abd687 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -703,7 +703,7 @@ public abstract class CcrIntegTestCase extends ESTestCase { request.source(new SearchSourceBuilder().size(0)); assertResponse(client.search(request), response -> { assertNotNull(response.getHits().getTotalHits()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(numDocsReplicated)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(numDocsReplicated)); }); }, 60, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java index 0fea3c0d3b74..1bf52b663b30 100644 --- a/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java +++ b/x-pack/plugin/core/src/internalClusterTest/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotIT.java @@ -274,7 +274,7 @@ public class SourceOnlySnapshotIT extends AbstractSnapshotIntegTestCase { }; assertResponse(prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC).setSize(numDocsExpected), searchResponse -> { assertConsumer.accept(searchResponse, sourceHadDeletions); - assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocsExpected, searchResponse.getHits().getTotalHits().value()); }); SearchResponse searchResponse = prepareSearch(index).addSort(SeqNoFieldMapper.NAME, SortOrder.ASC) .setScroll(TimeValue.timeValueMinutes(1)) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java index e66d41d08943..12864dd66a85 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/index/engine/frozen/RewriteCachingDirectoryReader.java @@ -9,9 +9,9 @@ package org.elasticsearch.index.engine.frozen; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.ByteVectorValues; import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexWriter; @@ -23,7 +23,6 @@ import org.apache.lucene.index.PointValues; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.TermVectors; import 
org.apache.lucene.index.Terms; @@ -214,6 +213,11 @@ final class RewriteCachingDirectoryReader extends DirectoryReader { throw new UnsupportedOperationException(); } + @Override + public DocValuesSkipper getDocValuesSkipper(String field) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public FloatVectorValues getFloatVectorValues(String field) throws IOException { throw new UnsupportedOperationException(); @@ -257,11 +261,6 @@ final class RewriteCachingDirectoryReader extends DirectoryReader { throw new UnsupportedOperationException(); } - @Override - public Fields getTermVectors(int docId) { - throw new UnsupportedOperationException(); - } - @Override public TermVectors termVectors() throws IOException { throw new UnsupportedOperationException(); @@ -282,11 +281,6 @@ final class RewriteCachingDirectoryReader extends DirectoryReader { return maxDoc; } - @Override - public void document(int docID, StoredFieldVisitor visitor) { - throw new UnsupportedOperationException(); - } - @Override protected void doClose() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java index 093ec031d0b3..421a306babf2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshot.java @@ -254,6 +254,7 @@ public class SourceOnlySnapshot { false, IndexOptions.NONE, DocValuesType.NONE, + fieldInfo.docValuesSkipIndexType(), -1, fieldInfo.attributes(), 0, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java index dea158b42507..d315f09ebda8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/AbstractTransportGetResourcesAction.java @@ -105,7 +105,7 @@ public abstract class AbstractTransportGetResourcesAction< listener.delegateFailure((l, response) -> { List docs = new ArrayList<>(); Set foundResourceIds = new HashSet<>(); - long totalHitCount = response.getHits().getTotalHits().value; + long totalHitCount = response.getHits().getTotalHits().value(); for (SearchHit hit : response.getHits().getHits()) { try ( XContentParser parser = XContentHelper.createParserNotCompressed( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java index de43f744c307..4e5f97acacf6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/Evaluation.java @@ -145,7 +145,7 @@ public interface Evaluation extends ToXContentObject, NamedWriteable { */ default void process(SearchResponse searchResponse) { Objects.requireNonNull(searchResponse); - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { String requiredFieldsString = String.join(", ", getRequiredFields()); throw ExceptionsHelper.badRequestException("No documents 
found containing all the required fields [{}]", requiredFieldsString); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java index 3154fe5999b8..129619f6976e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/ScrollHelper.java @@ -79,19 +79,19 @@ public final class ScrollHelper { } } - if (results.size() > resp.getHits().getTotalHits().value) { + if (results.size() > resp.getHits().getTotalHits().value()) { clearScroll.accept(lastResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + results.size() + "] than expected [" - + resp.getHits().getTotalHits().value + + resp.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." ) ); - } else if (results.size() == resp.getHits().getTotalHits().value) { + } else if (results.size() == resp.getHits().getTotalHits().value()) { clearScroll.accept(resp); // Finally, return the list of the entity listener.onResponse(Collections.unmodifiableList(results)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java index 0061870c73cc..32b12c834dd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SuggestProfilesResponse.java @@ -55,8 +55,8 @@ public class SuggestProfilesResponse extends ActionResponse implements ToXConten builder.field("took", tookInMillis); builder.startObject("total"); { - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); } builder.endObject(); builder.startArray("profiles"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java index 09a49c53ee1a..908f58c5f914 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReader.java @@ -155,17 +155,6 @@ public final class FieldSubsetReader extends SequentialStoredFieldsLeafReader { return fieldInfos; } - @Override - public Fields getTermVectors(int docID) throws IOException { - Fields f = super.getTermVectors(docID); - if (f == null) { - return null; - } - f = new FieldFilterFields(f); - // we need to check for emptyness, so we can return null: - return f.iterator().hasNext() ? 
f : null; - } - @Override public TermVectors termVectors() throws IOException { TermVectors termVectors = super.termVectors(); @@ -264,11 +253,6 @@ public final class FieldSubsetReader extends SequentialStoredFieldsLeafReader { return state; } - @Override - public void document(final int docID, final StoredFieldVisitor visitor) throws IOException { - super.document(docID, new FieldSubsetStoredFieldVisitor(visitor)); - } - @Override protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { return new FieldSubsetStoredFieldsReader(reader); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java index c85a648761ca..5ba5c1fd1218 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ApplicationPermission.java @@ -187,7 +187,7 @@ public final class ApplicationPermission { } private boolean grants(ApplicationPrivilege other, Automaton resource) { - return matchesPrivilege(other) && Operations.subsetOf(resource, this.resourceAutomaton); + return matchesPrivilege(other) && Automatons.subsetOf(resource, this.resourceAutomaton); } private boolean matchesPrivilege(ApplicationPrivilege other) { @@ -202,7 +202,7 @@ public final class ApplicationPermission { } return Operations.isEmpty(privilege.getAutomaton()) == false && Operations.isEmpty(other.getAutomaton()) == false - && Operations.subsetOf(other.getAutomaton(), privilege.getAutomaton()); + && Automatons.subsetOf(other.getAutomaton(), privilege.getAutomaton()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java index 9c41786f39ee..4e608281a785 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/ClusterPermission.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.permission; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RestrictedIndices; @@ -215,7 +214,7 @@ public class ClusterPermission { @Override public final boolean implies(final PermissionCheck permissionCheck) { if (permissionCheck instanceof ActionBasedPermissionCheck) { - return Operations.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) + return Automatons.subsetOf(((ActionBasedPermissionCheck) permissionCheck).automaton, this.automaton) && doImplies((ActionBasedPermissionCheck) permissionCheck); } return false; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java index f3c2d9f62e40..235d7419d2bf 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/FieldPermissions.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.common.Strings; @@ -34,8 +33,6 @@ import java.util.Objects; import java.util.Set; import java.util.stream.Collectors; -import static org.apache.lucene.util.automaton.Operations.subsetOf; - /** * Stores patterns to fields which access is granted or denied to and maintains an automaton that can be used to check if permission is * allowed for a specific field. @@ -175,10 +172,14 @@ public final class FieldPermissions implements Accountable, CacheKey { deniedFieldsAutomaton = Automatons.patterns(deniedFields); } - grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + grantedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); + deniedFieldsAutomaton = Operations.removeDeadStates( + Operations.determinize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT) + ); - if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { + if (Automatons.subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) { throw new ElasticsearchSecurityException( "Exceptions for field permissions must be a subset of the " + "granted fields but " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index e1b72cc43b38..558f8e6f22ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -288,7 +288,7 @@ public final class IndicesPermission { if (false == Operations.isEmpty(checkIndexAutomaton)) { Automaton allowedIndexPrivilegesAutomaton = null; for (var indexAndPrivilegeAutomaton : indexGroupAutomatons.entrySet()) { - if (Operations.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { + if (Automatons.subsetOf(checkIndexAutomaton, indexAndPrivilegeAutomaton.getValue())) { if (allowedIndexPrivilegesAutomaton != null) { allowedIndexPrivilegesAutomaton = Automatons.unionAndMinimize( Arrays.asList(allowedIndexPrivilegesAutomaton, indexAndPrivilegeAutomaton.getKey()) @@ -301,7 +301,7 @@ public final class IndicesPermission { for (String privilege : checkForPrivileges) { IndexPrivilege indexPrivilege = IndexPrivilege.get(Collections.singleton(privilege)); if (allowedIndexPrivilegesAutomaton != null - && Operations.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { + && Automatons.subsetOf(indexPrivilege.getAutomaton(), allowedIndexPrivilegesAutomaton)) { 
if (resourcePrivilegesMapBuilder != null) { resourcePrivilegesMapBuilder.addResourcePrivilege(forIndexPattern, privilege, Boolean.TRUE); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java index 68e3f11751aa..7434128f0312 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/Privilege.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.util.Maps; import org.elasticsearch.xpack.core.security.support.Automatons; @@ -90,7 +89,7 @@ public class Privilege { privileges.forEach( (name, priv) -> subsetCount.put( name, - privileges.values().stream().filter(p2 -> p2 != priv && Operations.subsetOf(priv.automaton, p2.automaton)).count() + privileges.values().stream().filter(p2 -> p2 != priv && Automatons.subsetOf(priv.automaton, p2.automaton)).count() ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java index a6347d8b7ec7..201cb4b69e47 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/support/Automatons.java @@ -9,9 +9,10 @@ package org.elasticsearch.xpack.core.security.support; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; +import org.apache.lucene.util.automaton.StatePair; +import org.apache.lucene.util.automaton.Transition; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.settings.Setting; @@ -20,6 +21,7 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Predicates; import org.elasticsearch.core.TimeValue; +import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -223,7 +225,10 @@ public final class Automatons { ); } String regex = pattern.substring(1, pattern.length() - 1); - return new RegExp(regex).toAutomaton(); + return Operations.determinize( + new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + DEFAULT_DETERMINIZE_WORK_LIMIT + ); } else if (pattern.equals("*")) { return MATCH_ALL; } else { @@ -269,7 +274,7 @@ public final class Automatons { } i += length; } - return concatenate(automata); + return Operations.determinize(concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } public static Automaton unionAndMinimize(Collection automata) { @@ -288,7 +293,7 @@ public final class Automatons { } private static Automaton minimize(Automaton automaton) { - return MinimizationOperations.minimize(automaton, maxDeterminizedStates); + return Operations.determinize(automaton, maxDeterminizedStates); } public static Predicate predicate(String... 
patterns) { @@ -329,7 +334,8 @@ public final class Automatons { } else if (automaton == EMPTY) { return Predicates.never(); } - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, maxDeterminizedStates); + automaton = Operations.determinize(automaton, maxDeterminizedStates); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); return new Predicate() { @Override public boolean test(String s) { @@ -368,4 +374,72 @@ public final class Automatons { throw new IllegalArgumentException("recordPatterns is set to false"); } } + + /** + * Returns true if the language of a1 is a subset of the language of a2. + * Both automata must be determinized and must have no dead states. + * + *
Complexity: quadratic in number of states. + * Copied from Lucene's AutomatonTestUtil + */ + public static boolean subsetOf(Automaton a1, Automaton a2) { + if (a1.isDeterministic() == false) { + throw new IllegalArgumentException("a1 must be deterministic"); + } + if (a2.isDeterministic() == false) { + throw new IllegalArgumentException("a2 must be deterministic"); + } + assert Operations.hasDeadStatesFromInitial(a1) == false; + assert Operations.hasDeadStatesFromInitial(a2) == false; + if (a1.getNumStates() == 0) { + // Empty language is always a subset of any other language + return true; + } else if (a2.getNumStates() == 0) { + return Operations.isEmpty(a1); + } + + // TODO: cutover to iterators instead + Transition[][] transitions1 = a1.getSortedTransitions(); + Transition[][] transitions2 = a2.getSortedTransitions(); + ArrayDeque worklist = new ArrayDeque<>(); + HashSet visited = new HashSet<>(); + StatePair p = new StatePair(0, 0); + worklist.add(p); + visited.add(p); + while (worklist.size() > 0) { + p = worklist.removeFirst(); + if (a1.isAccept(p.s1) && a2.isAccept(p.s2) == false) { + return false; + } + Transition[] t1 = transitions1[p.s1]; + Transition[] t2 = transitions2[p.s2]; + for (int n1 = 0, b2 = 0; n1 < t1.length; n1++) { + while (b2 < t2.length && t2[b2].max < t1[n1].min) { + b2++; + } + int min1 = t1[n1].min, max1 = t1[n1].max; + + for (int n2 = b2; n2 < t2.length && t1[n1].max >= t2[n2].min; n2++) { + if (t2[n2].min > min1) { + return false; + } + if (t2[n2].max < Character.MAX_CODE_POINT) { + min1 = t2[n2].max + 1; + } else { + min1 = Character.MAX_CODE_POINT; + max1 = Character.MIN_CODE_POINT; + } + StatePair q = new StatePair(t1[n1].dest, t2[n2].dest); + if (visited.contains(q) == false) { + worklist.add(q); + visited.add(q); + } + } + if (min1 <= max1) { + return false; + } + } + } + return true; + } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java index b4952373dfdd..92568c4f31c1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/termsenum/action/SimpleTermCountEnum.java @@ -13,6 +13,7 @@ import org.apache.lucene.index.TermState; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.util.AttributeSource; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.IOBooleanSupplier; import org.elasticsearch.index.mapper.MappedFieldType; import java.io.IOException; @@ -69,6 +70,11 @@ public class SimpleTermCountEnum extends TermsEnum { throw new UnsupportedOperationException(); } + @Override + public IOBooleanSupplier prepareSeekExact(BytesRef bytesRef) throws IOException { + throw new UnsupportedOperationException(); + } + @Override public boolean seekExact(BytesRef text) throws IOException { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java index e39ddc170c0a..54390365c62a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotShardTests.java @@ -11,6 +11,7 @@ 
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.MatchAllDocsQuery; @@ -400,14 +401,14 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { try (Engine.Searcher searcher = restoredShard.acquireSearcher("test")) { assertEquals(searcher.getIndexReader().maxDoc(), seqNoStats.getLocalCheckpoint()); TopDocs search = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); search = searcher.search( new MatchAllDocsQuery(), Integer.MAX_VALUE, new Sort(new SortField(SeqNoFieldMapper.NAME, SortField.Type.LONG)), false ); - assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value); + assertEquals(searcher.getIndexReader().numDocs(), search.totalHits.value()); long previous = -1; for (ScoreDoc doc : search.scoreDocs) { FieldDoc fieldDoc = (FieldDoc) doc; @@ -430,8 +431,9 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { assertEquals(original.exists(), restored.exists()); if (original.exists()) { - Document document = original.docIdAndVersion().reader.document(original.docIdAndVersion().docId); - Document restoredDocument = restored.docIdAndVersion().reader.document(restored.docIdAndVersion().docId); + StoredFields storedFields = original.docIdAndVersion().reader.storedFields(); + Document document = storedFields.document(original.docIdAndVersion().docId); + Document restoredDocument = storedFields.document(restored.docIdAndVersion().docId); for (IndexableField field : document) { assertEquals(document.get(field.name()), restoredDocument.get(field.name())); } @@ -470,7 +472,7 @@ public class SourceOnlySnapshotShardTests extends IndexShardTestCase { for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { rootFieldsVisitor.reset(); - leafReader.document(i, rootFieldsVisitor); + leafReader.storedFields().document(i, rootFieldsVisitor); rootFieldsVisitor.postProcess(targetShard.mapperService()::fieldType); String id = rootFieldsVisitor.id(); BytesReference source = rootFieldsVisitor.source(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java index 65d057408f8b..8433f38e40a0 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotTests.java @@ -32,6 +32,7 @@ import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SnapshotDeletionPolicy; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; import org.apache.lucene.index.StandardDirectoryReader; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.FieldExistsQuery; import org.apache.lucene.search.IndexSearcher; @@ -108,8 +109,10 @@ public class SourceOnlySnapshotTests extends ESTestCase { logger.warn(snapReader + " " + reader); assertEquals(snapReader.maxDoc(), reader.maxDoc()); 
assertEquals(snapReader.numDocs(), reader.numDocs()); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < snapReader.maxDoc(); i++) { - assertEquals(snapReader.document(i).get("_source"), reader.document(i).get("_source")); + assertEquals(snapStoredFields.document(i).get("_source"), storedFields.document(i).get("_source")); } for (LeafReaderContext ctx : snapReader.leaves()) { if (ctx.reader() instanceof SegmentReader) { @@ -188,12 +191,14 @@ public class SourceOnlySnapshotTests extends ESTestCase { try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 3); assertEquals(snapReader.numDocs(), 2); + StoredFields snapStoredFields = snapReader.storedFields(); + StoredFields storedFields = reader.storedFields(); for (int i = 0; i < 3; i++) { - assertEquals(snapReader.document(i).get("src"), reader.document(i).get("src")); + assertEquals(snapStoredFields.document(i).get("src"), storedFields.document(i).get("src")); } IndexSearcher searcher = newSearcher(snapReader); TopDocs id = searcher.search(new TermQuery(new Term("id", "1")), 10); - assertEquals(0, id.totalHits.value); + assertEquals(0, id.totalHits.value()); } targetDir = newDirectory(targetDir); @@ -321,7 +326,7 @@ public class SourceOnlySnapshotTests extends ESTestCase { try (DirectoryReader snapReader = DirectoryReader.open(wrappedDir)) { assertEquals(snapReader.maxDoc(), 1); assertEquals(snapReader.numDocs(), 1); - assertEquals("3", snapReader.document(0).getField("rank").stringValue()); + assertEquals("3", snapReader.storedFields().document(0).getField("rank").stringValue()); } try (IndexReader writerReader = DirectoryReader.open(writer)) { assertEquals(writerReader.maxDoc(), 2); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java index bb727204e265..114ad90354c6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/search/WeightedTokensQueryBuilderTests.java @@ -279,7 +279,7 @@ public class WeightedTokensQueryBuilderTests extends AbstractQueryTestCase boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java index c40dd00e0e35..6fe271d1b05e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/DocumentSubsetReaderTests.java @@ -96,25 +96,25 @@ public class DocumentSubsetReaderTests extends ESTestCase { ); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); TopDocs 
result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(0)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value2")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(1)); // this doc has been marked as deleted: indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value3")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(0)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(0L)); + assertThat(result.totalHits.value(), equalTo(0L)); indexSearcher = newSearcher(DocumentSubsetReader.wrap(directoryReader, bitsetCache, new TermQuery(new Term("field", "value4")))); assertThat(indexSearcher.getIndexReader().numDocs(), equalTo(1)); result = indexSearcher.search(new MatchAllDocsQuery(), 1); - assertThat(result.totalHits.value, equalTo(1L)); + assertThat(result.totalHits.value(), equalTo(1L)); assertThat(result.scoreDocs[0].doc, equalTo(3)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java index 560dee9b5843..db250b16eab1 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/FieldSubsetReaderTests.java @@ -30,6 +30,7 @@ import org.apache.lucene.index.FilterDirectoryReader; import org.apache.lucene.index.FloatVectorValues; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.KnnVectorValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NoMergePolicy; @@ -205,8 +206,9 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { FloatVectorValues vectorValues = leafReader.getFloatVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -215,7 +217,7 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { // Check that we can't see fieldB assertNull(leafReader.getFloatVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -239,8 +241,9 @@ public class FieldSubsetReaderTests extends 
MapperServiceTestCase { ByteVectorValues vectorValues = leafReader.getByteVectorValues("fieldA"); assertEquals(3, vectorValues.dimension()); assertEquals(1, vectorValues.size()); - assertEquals(0, vectorValues.nextDoc()); - assertNotNull(vectorValues.vectorValue()); + KnnVectorValues.DocIndexIterator iterator = vectorValues.iterator(); + assertEquals(0, iterator.nextDoc()); + assertNotNull(vectorValues.vectorValue(iterator.index())); TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); assertNotNull(topDocs); @@ -249,7 +252,7 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { // Check that we can't see fieldB assertNull(leafReader.getByteVectorValues("fieldB")); topDocs = leafReader.searchNearestVectors("fieldB", new byte[] { 1, 1, 1 }, 5, null, Integer.MAX_VALUE); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); assertEquals(0, topDocs.scoreDocs.length); TestUtil.checkReader(ir); @@ -274,11 +277,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("testA", d2.get("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -306,11 +304,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(new BytesRef("testA"), d2.getBinaryValue("fieldA")); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -338,11 +331,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -370,11 +358,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1L, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -402,11 +385,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1F, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -434,11 +412,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new 
CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals(1D, d2.getField("fieldA").numericValue()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -468,7 +441,7 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldA"))); // see only one field - Fields vectors = ir.getTermVectors(0); + Fields vectors = ir.termVectors().get(0); Set seenFields = new HashSet<>(); for (String field : vectors) { seenFields.add(field); @@ -615,7 +588,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { assertNotNull(dv); assertTrue(dv.advanceExact(0)); assertEquals(0, dv.nextOrd()); - assertEquals(SortedSetDocValues.NO_MORE_ORDS, dv.nextOrd()); assertEquals(new BytesRef("testA"), dv.lookupOrd(0)); assertNull(segmentReader.getSortedSetDocValues("fieldB")); @@ -702,11 +674,6 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(automaton)); // see only one field - { - Document d2 = ir.document(0); - assertEquals(1, d2.getFields().size()); - assertEquals("{\"fieldA\":\"testA\"}", d2.getBinaryValue(SourceFieldMapper.NAME).utf8ToString()); - } { Document d2 = ir.storedFields().document(0); assertEquals(1, d2.getFields().size()); @@ -1201,7 +1168,7 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { DirectoryReader ir = FieldSubsetReader.wrap(DirectoryReader.open(iw), new CharacterRunAutomaton(Automata.makeString("fieldB"))); // sees no fields - assertNull(ir.getTermVectors(0)); + assertNull(ir.termVectors().get(0)); TestUtil.checkReader(ir); IOUtils.close(ir, iw, dir); @@ -1229,14 +1196,9 @@ public class FieldSubsetReaderTests extends MapperServiceTestCase { assertNull(segmentReader.terms("foo")); // see no vectors - assertNull(segmentReader.getTermVectors(0)); assertNull(segmentReader.termVectors().get(0)); // see no stored fields - { - Document document = segmentReader.document(0); - assertEquals(0, document.getFields().size()); - } { Document document = segmentReader.storedFields().document(0); assertEquals(0, document.getFields().size()); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java index df64c4f87410..4751f66cf548 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/accesscontrol/SecurityIndexReaderWrapperIntegrationTests.java @@ -193,7 +193,7 @@ public class SecurityIndexReaderWrapperIntegrationTests extends AbstractBuilderT int expectedHitCount = valuesHitCount[i]; logger.info("Going to verify hit count with query [{}] with expected total hits [{}]", parsedQuery.query(), expectedHitCount); - Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager()); + Integer totalHits = indexSearcher.search(new MatchAllDocsQuery(), new TotalHitCountCollectorManager(indexSearcher.getSlices())); assertThat(totalHits, 
equalTo(expectedHitCount)); assertThat(wrappedDirectoryReader.numDocs(), equalTo(expectedHitCount)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java index 265714ee6ea1..073b3b92a43a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilegeTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; import org.elasticsearch.action.delete.TransportDeleteAction; @@ -17,6 +16,7 @@ import org.elasticsearch.action.update.TransportUpdateAction; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Collection; @@ -83,7 +83,7 @@ public class IndexPrivilegeTests extends ESTestCase { public void testRelationshipBetweenPrivileges() { assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("view_index_metadata")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -91,12 +91,12 @@ public class IndexPrivilegeTests extends ESTestCase { ); assertThat( - Operations.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(IndexPrivilege.get(Set.of("monitor")).automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(true) ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create", "create_doc", "index", "delete")).automaton, IndexPrivilege.get(Set.of("write")).automaton ), @@ -104,7 +104,7 @@ public class IndexPrivilegeTests extends ESTestCase { ); assertThat( - Operations.subsetOf( + Automatons.subsetOf( IndexPrivilege.get(Set.of("create_index", "delete_index")).automaton, IndexPrivilege.get(Set.of("manage")).automaton ), @@ -122,7 +122,7 @@ public class IndexPrivilegeTests extends ESTestCase { "indices:admin/seq_no/renew_retention_lease" ).forEach(action -> assertThat(crossClusterReplication.predicate.test(action + randomAlphaOfLengthBetween(0, 8)), is(true))); assertThat( - Operations.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), + Automatons.subsetOf(crossClusterReplication.automaton, IndexPrivilege.get(Set.of("manage", "read", "monitor")).automaton), is(true) ); @@ -139,10 +139,10 @@ public class IndexPrivilegeTests extends ESTestCase { ); assertThat( - Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), + Automatons.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("manage")).automaton), is(false) ); - assertThat(Operations.subsetOf(crossClusterReplicationInternal.automaton, IndexPrivilege.get(Set.of("all")).automaton), is(true)); + assertThat(Automatons.subsetOf(crossClusterReplicationInternal.automaton, 
IndexPrivilege.get(Set.of("all")).automaton), is(true)); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java index 6f3c435eb12f..a58acf82ea44 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/PrivilegeTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.security.authz.privilege; -import org.apache.lucene.util.automaton.Operations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.TransportCancelTasksAction; import org.elasticsearch.action.admin.cluster.reroute.TransportClusterRerouteAction; @@ -218,13 +218,13 @@ public class PrivilegeTests extends ESTestCase { Set name = Sets.newHashSet(first.name().iterator().next(), second.name().iterator().next()); IndexPrivilege index = IndexPrivilege.get(name); - if (Operations.subsetOf(second.getAutomaton(), first.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - } else if (Operations.subsetOf(first.getAutomaton(), second.getAutomaton())) { - assertTrue(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + if (Automatons.subsetOf(second.getAutomaton(), first.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + } else if (Automatons.subsetOf(first.getAutomaton(), second.getAutomaton())) { + assertTrue(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } else { - assertFalse(Operations.sameLanguage(index.getAutomaton(), first.getAutomaton())); - assertFalse(Operations.sameLanguage(index.getAutomaton(), second.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), first.getAutomaton())); + assertFalse(AutomatonTestUtil.sameLanguage(index.getAutomaton(), second.getAutomaton())); } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java index 0b2e48bd20df..94f91f427e19 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/support/AutomatonsTests.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT; import static org.elasticsearch.xpack.core.security.support.Automatons.pattern; import static org.elasticsearch.xpack.core.security.support.Automatons.patterns; import static org.elasticsearch.xpack.core.security.support.Automatons.predicate; @@ -115,12 +114,12 @@ public class AutomatonsTests extends ESTestCase { } private void assertMatch(Automaton automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertTrue(runAutomaton.run(text)); } private void assertMismatch(Automaton 
automaton, String text) { - CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton, DEFAULT_DETERMINIZE_WORK_LIMIT); + CharacterRunAutomaton runAutomaton = new CharacterRunAutomaton(automaton); assertFalse(runAutomaton.run(text)); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java index d3dcd7ae36f5..65d53d3adabe 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichPolicyRunnerTests.java @@ -125,7 +125,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = searchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -176,7 +176,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -202,7 +202,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("location"), is(equalTo("POINT(10.0 10.0)"))); @@ -244,7 +244,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -286,7 +286,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("range"), is(equalTo(Map.of("lt", 10, "gt", 1)))); @@ -330,7 +330,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, 
equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -376,7 +376,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(sourceIndexName).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("subnet"), is(equalTo("10.0.0.0/8"))); @@ -421,7 +421,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -460,7 +460,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -522,7 +522,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -564,7 +564,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -633,7 +633,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -688,7 +688,7 @@ public 
class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("idx"), is(equalTo(targetIdx))); @@ -749,7 +749,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(3L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(3L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(5))); @@ -943,7 +943,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -993,7 +993,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1051,7 +1051,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1100,7 +1100,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1158,7 +1158,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + 
assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1209,7 +1209,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1273,7 +1273,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1329,7 +1329,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1395,7 +1395,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1450,7 +1450,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { new SearchRequest(".enrich-test1").source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery())) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1519,7 +1519,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); Map dataField = ((Map) sourceDocMap.get("data")); @@ -1580,7 +1580,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { 
SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021-08-19T14:00:00Z")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(0L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); assertResponse( @@ -1590,7 +1590,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ) ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(1))); @@ -1614,7 +1614,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { SearchSourceBuilder.searchSource().query(QueryBuilders.matchQuery("data.fields.period", "2021/08/20 at 14:00")) ) ), - enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)) + enrichSearchResponse -> assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)) ); // Validate segments @@ -1657,7 +1657,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("data.field1"), is(equalTo("value1"))); @@ -1704,7 +1704,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(2))); @@ -1736,7 +1736,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -1868,7 +1868,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { ), enrichSearchResponse -> { - assertThat(enrichSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(enrichSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map enrichDocument = enrichSearchResponse.getHits().iterator().next().getSourceAsMap(); assertNotNull(enrichDocument); assertThat(enrichDocument.size(), is(equalTo(3))); @@ -1901,7 +1901,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new 
SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2007,7 +2007,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); @@ -2405,7 +2405,7 @@ public class EnrichPolicyRunnerTests extends ESSingleNodeTestCase { assertResponse( client().search(new SearchRequest(sourceIndex).source(SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()))), sourceSearchResponse -> { - assertThat(sourceSearchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(sourceSearchResponse.getHits().getTotalHits().value(), equalTo(1L)); Map sourceDocMap = sourceSearchResponse.getHits().getAt(0).getSourceAsMap(); assertNotNull(sourceDocMap); assertThat(sourceDocMap.get("field1"), is(equalTo("value1"))); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java index 00f22aca2cb9..8dbc9b0f4f43 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchActionTests.java @@ -64,7 +64,7 @@ public class EnrichShardMultiSearchActionTests extends ESSingleNodeTestCase { assertThat(response.getResponses().length, equalTo(numSearches)); for (int i = 0; i < numSearches; i++) { assertThat(response.getResponses()[i].isFailure(), is(false)); - assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getResponses()[i].getResponse().getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().size(), equalTo(1)); assertThat( response.getResponses()[i].getResponse().getHits().getHits()[0].getSourceAsMap().get("key1"), diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java index cd98b43adc15..5e1fde0dfb94 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorIndexService.java @@ -1073,7 +1073,7 @@ public class ConnectorIndexService { final List connectorResults = Arrays.stream(response.getHits().getHits()) .map(ConnectorIndexService::hitToConnector) .toList(); 
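The getTotalHits() churn running through the enrich tests and the ent-search services above is mechanical: TotalHits is a record in Lucene 10, so the former public value and relation fields are read through value() and relation() accessors. A short sketch of the new shape, assuming an already-obtained SearchResponse with total-hit tracking enabled (getTotalHits() can otherwise be null):

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchResponse;

class TotalHitsSketch {
    static long exactHitCount(SearchResponse response) {
        TotalHits total = response.getHits().getTotalHits();
        // Lucene 9: total.value and total.relation were public fields.
        if (total.relation() == TotalHits.Relation.EQUAL_TO) {
            return total.value();
        }
        return -1L; // value() is only a lower bound when the relation is GREATER_THAN_OR_EQUAL_TO
    }
}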
- return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value); + return new ConnectorIndexService.ConnectorResult(connectorResults, (int) response.getHits().getTotalHits().value()); } private static ConnectorSearchResult hitToConnector(SearchHit searchHit) { @@ -1115,7 +1115,7 @@ public class ConnectorIndexService { client.search(searchRequest, new ActionListener<>() { @Override public void onResponse(SearchResponse searchResponse) { - boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value > 0L; + boolean indexNameIsInUse = searchResponse.getHits().getTotalHits().value() > 0L; listener.onResponse(indexNameIsInUse); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index 9ef895a3a578..ce6f7f0dbf2b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -417,7 +417,7 @@ public class ConnectorSyncJobIndexService { .map(ConnectorSyncJobIndexService::hitToConnectorSyncJob) .toList(); - return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value); + return new ConnectorSyncJobsResult(connectorSyncJobs, (int) searchResponse.getHits().getTotalHits().value()); } private static ConnectorSyncJobSearchResult hitToConnectorSyncJob(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java index 2eec155ae8ea..8bf4bbd5716b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesIndexService.java @@ -436,7 +436,7 @@ public class QueryRulesIndexService { final List rulesetResults = Arrays.stream(response.getHits().getHits()) .map(QueryRulesIndexService::hitToQueryRulesetListItem) .toList(); - return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value); + return new QueryRulesetResult(rulesetResults, (int) response.getHits().getTotalHits().value()); } private static QueryRulesetListItem hitToQueryRulesetListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java index 9e8a8f750b76..30d533aeb9ae 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/SearchApplicationIndexService.java @@ -416,7 +416,7 @@ public class SearchApplicationIndexService { final List apps = Arrays.stream(response.getHits().getHits()) .map(SearchApplicationIndexService::hitToSearchApplicationListItem) .toList(); - return new SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value); + return new 
SearchApplicationResult(apps, (int) response.getHits().getTotalHits().value()); } private static SearchApplicationListItem hitToSearchApplicationListItem(SearchHit searchHit) { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index be1d4c0871ca..2b7b8b074fa7 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -582,8 +582,8 @@ public class EqlSearchResponse extends ActionResponse implements ToXContentObjec builder.startObject(Fields.HITS); if (totalHits != null) { builder.startObject(Fields.TOTAL); - builder.field("value", totalHits.value); - builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + builder.field("value", totalHits.value()); + builder.field("relation", totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); builder.endObject(); } if (events != null) { diff --git a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt index b04d28654f1d..00c08096fd08 100644 --- a/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt +++ b/x-pack/plugin/eql/src/test/resources/querytranslator_tests.txt @@ -769,7 +769,7 @@ process where command_line regex "^.*?net.exe" regexSingleArgInsensitive process where command_line regex~ "^.*?net.exe" ; -"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe","flags_value":65791,"case_insensitive":true ; regexMultiArg @@ -781,7 +781,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe") regexMultiArgInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe","flags_value":65791,"case_insensitive":true ; regexMultiMultiArgVariant @@ -793,7 +793,7 @@ process where command_line regex ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\ regexMultiMultiArgVariantInsensitive process where command_line regex~ ("^.*?net.exe", "net\\.exe", "C:\\\\Windows\\\\system32\\\\net1\\s+") ; -"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":255,"case_insensitive":true +"regexp":{"command_line":{"value":"^.*?net.exe|net\\.exe|C:\\\\Windows\\\\system32\\\\net1\\s+","flags_value":65791,"case_insensitive":true ; regexMultiArgWithScript diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java index f437dc5819dc..4e559f564acb 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 
@@ public class RLikePattern extends AbstractStringPattern { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java index 7cedbc474213..3e9cbf92727c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.esql.core.expression.predicate.regex; import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.esql.core.util.StringUtils; @@ -39,8 +38,7 @@ public class WildcardPattern extends AbstractStringPattern { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java index 1c9c97a364fc..9633051781f4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSliceQueue.java @@ -120,7 +120,7 @@ public final class LuceneSliceQueue { } static List> segmentSlices(List leafContexts) { - IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE); - return Arrays.stream(gs).map(g -> Arrays.stream(g.leaves).map(PartialLeafReaderContext::new).toList()).toList(); + IndexSearcher.LeafSlice[] gs = IndexSearcher.slices(leafContexts, MAX_DOCS_PER_SLICE, MAX_SEGMENTS_PER_SLICE, false); + return Arrays.stream(gs).map(g -> Arrays.stream(g.partitions).map(PartialLeafReaderContext::new).toList()).toList(); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 2e32d20a2365..0f600958b93b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.TopFieldCollector; +import org.apache.lucene.search.TopFieldCollectorManager; import org.elasticsearch.common.Strings; import 
org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocVector; @@ -230,8 +231,9 @@ public final class LuceneTopNSourceOperator extends LuceneOperator { if (sortAndFormats.isEmpty()) { throw new IllegalStateException("sorts must not be disabled in TopN"); } + // We don't use CollectorManager here as we don't retrieve the total hits and sort by score. - this.topFieldCollector = TopFieldCollector.create(sortAndFormats.get().sort, limit, 0); + this.topFieldCollector = new TopFieldCollectorManager(sortAndFormats.get().sort, limit, null, 0, false).newCollector(); } LeafCollector getLeafCollector(LeafReaderContext leafReaderContext) throws IOException { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java index e9063c9597c5..c92dc7539772 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/PartialLeafReaderContext.java @@ -8,6 +8,7 @@ package org.elasticsearch.compute.lucene; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; /** * A subset of a {@link LeafReaderContext}. @@ -16,6 +17,10 @@ import org.apache.lucene.index.LeafReaderContext; * @param maxDoc one more than the last document */ public record PartialLeafReaderContext(LeafReaderContext leafReaderContext, int minDoc, int maxDoc) { + public PartialLeafReaderContext(IndexSearcher.LeafReaderContextPartition partition) { + this(partition.ctx, partition.minDocId, partition.maxDocId); + } + public PartialLeafReaderContext(LeafReaderContext leafReaderContext) { this(leafReaderContext, 0, leafReaderContext.reader().maxDoc()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java index 6937f1a8c777..f70cfe1dc8a4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperator.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.esql.enrich; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.ConstantScoreQuery; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.LeafCollector; import org.apache.lucene.search.Query; @@ -89,7 +90,7 @@ final class EnrichQuerySourceOperator extends SourceOperator { continue; } final DocCollector collector = new DocCollector(docsBuilder); - scorer.score(collector, leaf.reader().getLiveDocs()); + scorer.score(collector, leaf.reader().getLiveDocs(), 0, DocIdSetIterator.NO_MORE_DOCS); int matches = collector.matches; if (segmentsBuilder != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java index 09166f0cff7a..0af22a357aec 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/AutomataMatch.java @@ -37,7 +37,7 @@ public class AutomataMatch { * we couldn't get a nice toDot - so we call UTF32ToUTF8 ourselves. */ Automaton automaton = Operations.determinize(new UTF32ToUTF8().convert(utf32Automaton), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); - ByteRunAutomaton run = new ByteRunAutomaton(automaton, true, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + ByteRunAutomaton run = new ByteRunAutomaton(automaton, true); return new AutomataMatchEvaluator.Factory(source, field, run, toDot(automaton)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index 7ff09c23a140..5903d725bf9c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -441,7 +441,7 @@ public abstract class ExpressionBuilder extends IdentifierBuilder { // use the fast run variant result = new UnresolvedNamePattern( src, - new CharacterRunAutomaton(Operations.concatenate(list)), + new CharacterRunAutomaton(Operations.determinize(Operations.concatenate(list), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)), patternString.toString(), nameString.toString() ); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java index 214c7b105335..f6668db52b93 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueMatchQuery.java @@ -69,14 +69,6 @@ final class SingleValueMatchQuery extends Query { @Override public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) { return new ConstantScoreWeight(this, boost) { - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - final ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { @@ -96,12 +88,12 @@ final class SingleValueMatchQuery extends Query { * can't do that because we need the check the number of fields. 
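The AutomataMatch and ExpressionBuilder hunks above show the other half of the automaton migration: the ByteRunAutomaton and CharacterRunAutomaton constructors no longer take a determinize work limit, so callers determinize explicitly before building the run automaton. A hedged sketch of that ordering (the regular expression is a placeholder):

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

class RunAutomatonSketch {
    static CharacterRunAutomaton compile(String regex) {
        Automaton parsed = new RegExp(regex).toAutomaton();
        // Determinize up front; the run-automaton constructors no longer do it for you.
        Automaton dfa = Operations.determinize(parsed, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        return new CharacterRunAutomaton(dfa);
    }
}

The resulting run automaton is then used as before, e.g. compile("logs-.*").run("logs-2024").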
*/ if (lfd instanceof LeafNumericFieldData n) { - return scorerSupplier(context, n.getLongValues(), this, boost, scoreMode); + return scorerSupplier(context, n.getLongValues(), boost, scoreMode); } if (lfd instanceof LeafOrdinalsFieldData o) { - return scorerSupplier(context, o.getOrdinalsValues(), this, boost, scoreMode); + return scorerSupplier(context, o.getOrdinalsValues(), boost, scoreMode); } - return scorerSupplier(context, lfd.getBytesValues(), this, boost, scoreMode); + return scorerSupplier(context, lfd.getBytesValues(), boost, scoreMode); } @Override @@ -113,7 +105,6 @@ final class SingleValueMatchQuery extends Query { private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedNumericDocValues sortedNumerics, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -122,16 +113,9 @@ final class SingleValueMatchQuery extends Query { // check for dense field final PointValues points = context.reader().getPointValues(fieldData.getFieldName()); if (points != null && points.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { - return new PredicateScorerSupplier( - weight, - boost, - scoreMode, - maxDoc, - MULTI_VALUE_MATCH_COST, - sortedNumerics::advanceExact - ); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, sortedNumerics::advanceExact); } } final CheckedIntPredicate predicate = doc -> { @@ -144,13 +128,12 @@ final class SingleValueMatchQuery extends Query { } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedSetDocValues sortedSetDocValues, - Weight weight, float boost, ScoreMode scoreMode ) throws IOException { @@ -159,10 +142,9 @@ final class SingleValueMatchQuery extends Query { // check for dense field final Terms terms = context.reader().terms(fieldData.getFieldName()); if (terms != null && terms.getDocCount() == maxDoc) { - return new DocIdSetIteratorScorerSupplier(weight, boost, scoreMode, DocIdSetIterator.all(maxDoc)); + return new DocIdSetIteratorScorerSupplier(boost, scoreMode, DocIdSetIterator.all(maxDoc)); } else { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -181,20 +163,18 @@ final class SingleValueMatchQuery extends Query { } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } private ScorerSupplier scorerSupplier( LeafReaderContext context, SortedBinaryDocValues sortedBinaryDocValues, - Weight weight, float boost, ScoreMode scoreMode ) { final int maxDoc = context.reader().maxDoc(); if (FieldData.unwrapSingleton(sortedBinaryDocValues) != null) { return new PredicateScorerSupplier( - weight, boost, scoreMode, maxDoc, @@ -212,7 +192,7 @@ final class SingleValueMatchQuery extends Query { } return true; }; - return new PredicateScorerSupplier(weight, boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); + return new PredicateScorerSupplier(boost, scoreMode, maxDoc, MULTI_VALUE_MATCH_COST, predicate); } }; } @@ -266,13 +246,11 @@ final class SingleValueMatchQuery extends Query 
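The SingleValueMatchQuery rewrite around this point reflects two related Lucene 10 changes: a Weight now only provides scorerSupplier(LeafReaderContext) (the separate scorer() override is gone), and Scorer implementations such as ConstantScoreScorer no longer receive the Weight. A compact sketch of a constant-score weight in that style, with a match-all iterator standing in for the real matching logic:

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.ConstantScoreScorer;
import org.apache.lucene.search.ConstantScoreWeight;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;

class ConstantScoreWeightSketch {
    static Weight weightFor(Query query, ScoreMode scoreMode, float boost) {
        return new ConstantScoreWeight(query, boost) {
            @Override
            public ScorerSupplier scorerSupplier(LeafReaderContext context) {
                final int maxDoc = context.reader().maxDoc();
                final float score = score();
                return new ScorerSupplier() {
                    @Override
                    public Scorer get(long leadCost) {
                        // No Weight argument any more: just score, score mode and iterator.
                        return new ConstantScoreScorer(score, scoreMode, DocIdSetIterator.all(maxDoc));
                    }

                    @Override
                    public long cost() {
                        return maxDoc;
                    }
                };
            }

            @Override
            public boolean isCacheable(LeafReaderContext ctx) {
                return true;
            }
        };
    }
}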
{ private static class DocIdSetIteratorScorerSupplier extends ScorerSupplier { - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final DocIdSetIterator docIdSetIterator; - private DocIdSetIteratorScorerSupplier(Weight weight, float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { - this.weight = weight; + private DocIdSetIteratorScorerSupplier(float score, ScoreMode scoreMode, DocIdSetIterator docIdSetIterator) { this.score = score; this.scoreMode = scoreMode; this.docIdSetIterator = docIdSetIterator; @@ -280,7 +258,7 @@ final class SingleValueMatchQuery extends Query { @Override public Scorer get(long leadCost) { - return new ConstantScoreScorer(weight, score, scoreMode, docIdSetIterator); + return new ConstantScoreScorer(score, scoreMode, docIdSetIterator); } @Override @@ -290,23 +268,13 @@ final class SingleValueMatchQuery extends Query { } private static class PredicateScorerSupplier extends ScorerSupplier { - - private final Weight weight; private final float score; private final ScoreMode scoreMode; private final int maxDoc; private final int matchCost; private final CheckedIntPredicate predicate; - private PredicateScorerSupplier( - Weight weight, - float score, - ScoreMode scoreMode, - int maxDoc, - int matchCost, - CheckedIntPredicate predicate - ) { - this.weight = weight; + private PredicateScorerSupplier(float score, ScoreMode scoreMode, int maxDoc, int matchCost, CheckedIntPredicate predicate) { this.score = score; this.scoreMode = scoreMode; this.maxDoc = maxDoc; @@ -327,7 +295,7 @@ final class SingleValueMatchQuery extends Query { return matchCost; } }; - return new ConstantScoreScorer(weight, score, scoreMode, iterator); + return new ConstantScoreScorer(score, scoreMode, iterator); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java index 107c2af11c4f..04da5d406fbb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/EnrichQuerySourceOperatorTests.java @@ -108,7 +108,7 @@ public class EnrichQuerySourceOperatorTests extends ESTestCase { QueryList queryList = QueryList.termQueryList(uidField, mock(SearchExecutionContext.class), inputTerms, KEYWORD); assertThat(queryList.getPositionCount(), equalTo(6)); assertThat(queryList.getQuery(0), equalTo(new TermQuery(new Term("uid", new BytesRef("b2"))))); - assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", new BytesRef("c1"), new BytesRef("a2")))); + assertThat(queryList.getQuery(1), equalTo(new TermInSetQuery("uid", List.of(new BytesRef("c1"), new BytesRef("a2"))))); assertThat(queryList.getQuery(2), equalTo(new TermQuery(new Term("uid", new BytesRef("z2"))))); assertNull(queryList.getQuery(3)); assertThat(queryList.getQuery(4), equalTo(new TermQuery(new Term("uid", new BytesRef("a3"))))); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java index 2ba397a3cb3d..95444c9b2423 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/querydsl/query/SingleValueQueryTests.java @@ -232,7 +232,7 @@ public class SingleValueQueryTests extends MapperServiceTestCase { private List docFor(int i, Iterable values) { List fields = new ArrayList<>(); - fields.add(new LongField("i", i)); + fields.add(new LongField("i", i, Field.Store.NO)); fields.add(new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)); switch (fieldType) { case "long", "integer", "short", "byte" -> { @@ -270,7 +270,10 @@ public class SingleValueQueryTests extends MapperServiceTestCase { List> fieldValues = new ArrayList<>(100); for (int i = 0; i < 100; i++) { iw.addDocument( - List.of(new LongField("i", i), new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO)) + List.of( + new LongField("i", i, Field.Store.NO), + new TextField("str", "the quick brown fox jumped over the lazy dog", Field.Store.NO) + ) ); fieldValues.add(List.of()); } diff --git a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java index 3623d3671e83..6d90b0e67ee8 100644 --- a/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java +++ b/x-pack/plugin/graph/src/internalClusterTest/java/org/elasticsearch/xpack/graph/test/GraphTests.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.graph.test; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.ShardSegments; @@ -165,7 +165,7 @@ public class GraphTests extends ESSingleNodeTestCase { VertexRequest peopleNames = hop1.addVertexRequest("people").minDocCount(1); peopleNames.addInclude("john", 1); - for (int i = 0; i < BooleanQuery.getMaxClauseCount() + 1; i++) { + for (int i = 0; i < IndexSearcher.getMaxClauseCount() + 1; i++) { peopleNames.addInclude("unknown" + i, 1); } diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 36e8eaf94c8b..b60ce13e0228 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.graph.action; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.ExceptionsHelper; @@ -564,7 +564,7 @@ public class TransportGraphExploreAction extends HandledTransportAction> entry : lastHopFindings.entrySet()) { numClauses += entry.getValue().size(); } - if (numClauses < BooleanQuery.getMaxClauseCount()) { + if (numClauses < IndexSearcher.getMaxClauseCount()) { // We can afford to build a Boolean OR query with individual // boosts for interesting terms for (Entry> entry : lastHopFindings.entrySet()) { @@ -755,7 +755,7 @@ public class TransportGraphExploreAction extends 
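The graph changes above and just below rename the global clause limit: BooleanQuery.getMaxClauseCount() is gone and the limit is read from IndexSearcher.getMaxClauseCount() instead. A hedged sketch of the usual guard, also using the collection-based TermInSetQuery constructor that the enrich test above switched to (field name and values are illustrative):

import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;

class ClauseLimitSketch {
    static Query orOverTerms(String field, List<String> values) {
        if (values.size() > IndexSearcher.getMaxClauseCount()) {
            // Too many SHOULD clauses; fall back to a cheaper set query.
            return new TermInSetQuery(field, values.stream().map(BytesRef::new).toList());
        }
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        for (String value : values) {
            builder.add(new TermQuery(new Term(field, value)), BooleanClause.Occur.SHOULD);
        }
        return builder.build();
    }
}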
HandledTransportAction BooleanQuery.getMaxClauseCount()) { + if ((includesContainer.should().size() + termBoosts.length) > IndexSearcher.getMaxClauseCount()) { // Too many terms - we need a cheaper form of query to execute this List termValues = new ArrayList<>(); for (TermBoost tb : termBoosts) { diff --git a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java index 73c0f6d4c768..54d83af8f5d9 100644 --- a/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java +++ b/x-pack/plugin/inference/src/internalClusterTest/java/org/elasticsearch/xpack/inference/action/filter/ShardBulkInferenceActionFilterIT.java @@ -135,7 +135,7 @@ public class ShardBulkInferenceActionFilterIT extends ESIntegTestCase { SearchSourceBuilder sourceBuilder = new SearchSourceBuilder().size(0).trackTotalHits(true); SearchResponse searchResponse = client().search(new SearchRequest(INDEX_NAME).source(sourceBuilder)).get(); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(totalDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(totalDocs)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java index 8416d58cb132..f444719c730f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapperTests.java @@ -598,7 +598,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -606,7 +606,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName1, List.of("a", "b")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -614,7 +614,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("d")), 10 ); - assertEquals(1, topDocs.totalHits.value); + assertEquals(1, topDocs.totalHits.value()); assertEquals(3, topDocs.scoreDocs[0].doc); } { @@ -622,7 +622,7 @@ public class SemanticTextFieldMapperTests extends MapperTestCase { generateNestedTermSparseVectorQuery(mapperService.mappingLookup().nestedLookup(), fieldName2, List.of("z")), 10 ); - assertEquals(0, topDocs.totalHits.value); + assertEquals(0, topDocs.totalHits.value()); } }); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java index f54ce8918307..b8bcb766b53e 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/queries/SemanticQueryBuilderTests.java @@ -199,9 +199,9 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase { - assertThat(c.getOccur(), equalTo(SHOULD)); - assertThat(c.getQuery(), instanceOf(BoostQuery.class)); - assertThat(((BoostQuery) c.getQuery()).getBoost(), equalTo(TOKEN_WEIGHT)); + assertThat(c.occur(), equalTo(SHOULD)); + assertThat(c.query(), instanceOf(BoostQuery.class)); + assertThat(((BoostQuery) c.query()).getBoost(), equalTo(TOKEN_WEIGHT)); }); } @@ -223,7 +223,7 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase outerMustClauses = new ArrayList<>(); List outerFilterClauses = new ArrayList<>(); for (BooleanClause clause : outerBooleanQuery.clauses()) { - BooleanClause.Occur occur = clause.getOccur(); + BooleanClause.Occur occur = clause.occur(); if (occur == MUST) { outerMustClauses.add(clause); } else if (occur == FILTER) { @@ -236,7 +236,7 @@ public class SemanticQueryBuilderTests extends AbstractQueryTestCase { - final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value); + final int numHits = Math.toIntExact(searchResponse.getHits().getTotalHits().value()); final Map pipelineSources = Maps.newMapWithExpectedSize(numHits); final Consumer clearScroll = (response) -> { if (response != null && response.getScrollId() != null) { @@ -148,14 +148,14 @@ public class TransportGetPipelineAction extends HandledTransportAction listener ) { int numberOfHitsSeenSoFar = numberOfHitsSeenPreviously + searchResponse.getHits().getHits().length; - if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value) { + if (numberOfHitsSeenSoFar > searchResponse.getHits().getTotalHits().value()) { clearScroll.accept(searchResponse); listener.onFailure( new IllegalStateException( "scrolling returned more hits [" + numberOfHitsSeenSoFar + "] than expected [" - + searchResponse.getHits().getTotalHits().value + + searchResponse.getHits().getTotalHits().value() + "] so bailing out to prevent unbounded " + "memory consumption." 
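The SemanticQueryBuilderTests hunk above is the same record migration applied to BooleanClause: getOccur() and getQuery() become occur() and query(). A short sketch of clause inspection with the new accessors (the query being inspected is arbitrary):

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;

class BooleanClauseSketch {
    static List<Query> mustClauses(BooleanQuery query) {
        List<Query> musts = new ArrayList<>();
        for (BooleanClause clause : query.clauses()) {
            if (clause.occur() == BooleanClause.Occur.MUST) {  // was clause.getOccur()
                musts.add(clause.query());                     // was clause.getQuery()
            }
        }
        return musts;
    }
}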
) @@ -179,7 +179,7 @@ public class TransportGetPipelineAction extends HandledTransportAction= docValues.size()) { - return NO_MORE_ORDS; - } - return currentOrd; + return ++currentOrd; } @Override diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 303b94ec655d..e8fd0da496bb 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -651,7 +651,7 @@ public class UnsignedLongFieldMapper extends FieldMapper { List fields = new ArrayList<>(); if (indexed && hasDocValues) { - fields.add(new LongField(fieldType().name(), numericValue)); + fields.add(new LongField(fieldType().name(), numericValue, Field.Store.NO)); } else if (hasDocValues) { fields.add(new SortedNumericDocValuesField(fieldType().name(), numericValue)); } else if (indexed) { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java index 00532d95574c..4f42103bc454 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionEncoder.java @@ -13,7 +13,6 @@ import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.automaton.Automata; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import java.util.Locale; @@ -213,9 +212,9 @@ class VersionEncoder { a = Operations.concatenate(a, Automata.makeAnyBinary()); assert a.isDeterministic(); - a = MinimizationOperations.minimize(a, 0); + a = Operations.determinize(a, 0); - return new CompiledAutomaton(a, null, true, 0, true); + return new CompiledAutomaton(a, false, true, true); } static class EncodedVersion { diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java index 387a49a29dc2..1e5ecf19bdf8 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionFieldWildcardQuery.java @@ -40,11 +40,11 @@ class VersionFieldWildcardQuery extends AutomatonQuery { private static final byte WILDCARD_CHAR = '?'; VersionFieldWildcardQuery(Term term, boolean caseInsensitive) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true); + super(term, toAutomaton(term, caseInsensitive), true); } VersionFieldWildcardQuery(Term term, boolean caseInsensitive, RewriteMethod rewriteMethod) { - super(term, toAutomaton(term, caseInsensitive), Integer.MAX_VALUE, true, rewriteMethod); + super(term, toAutomaton(term, caseInsensitive), true, rewriteMethod); } private static Automaton toAutomaton(Term wildcardquery, boolean caseInsensitive) { @@ -114,7 +114,7 @@ class VersionFieldWildcardQuery 
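The doc-values hunk above, which drops the NO_MORE_ORDS bookkeeping, follows the Lucene 10 SortedSetDocValues contract: consumers call docValueCount() and read exactly that many ordinals per document instead of looping until a sentinel value. A hedged sketch of the new consumption loop (the prefix check is only there to give the loop something to do):

import java.io.IOException;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef;

class OrdinalLoopSketch {
    static int countTermsStartingWithA(SortedSetDocValues values, int docId) throws IOException {
        if (values.advanceExact(docId) == false) {
            return 0; // no values for this document
        }
        int matches = 0;
        int count = values.docValueCount();
        for (int i = 0; i < count; i++) {          // fixed count; no NO_MORE_ORDS check
            BytesRef term = values.lookupOrd(values.nextOrd());
            if (term.utf8ToString().startsWith("a")) {
                matches++;
            }
        }
        return matches;
    }
}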
extends AutomatonQuery { if (containsPreReleaseSeparator == false) { automata.add(Operations.optional(Automata.makeChar(VersionEncoder.NO_PRERELEASE_SEPARATOR_BYTE))); } - return Operations.concatenate(automata); + return Operations.determinize(Operations.concatenate(automata), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java index 17e1d70cbb47..01f0fdb25655 100644 --- a/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java +++ b/x-pack/plugin/mapper-version/src/main/java/org/elasticsearch/xpack/versionfield/VersionStringDocValuesField.java @@ -47,7 +47,8 @@ public class VersionStringDocValuesField extends AbstractScriptFieldFactory { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[0].getSourceAsMap().get("version")); } ); @@ -134,7 +134,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.DESC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("1.3.567#12", hits[0].getSortValues()[0]); assertEquals("1.2.3alpha", hits[1].getSortValues()[0]); @@ -150,7 +150,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(8, response.getHits().getTotalHits().value); + assertEquals(8, response.getHits().getTotalHits().value()); var hits = response.getHits().getHits(); assertEquals("1.0.0", hits[0].getSortValues()[0]); assertEquals("1.3.0+build.1234567", hits[1].getSortValues()[0]); @@ -179,7 +179,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "2.*0")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -187,21 +187,21 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "<0-10>.<0-10>.*al.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } ); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", "1.[0-9].[0-9].*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, 
response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("1.3.0+build.1234567", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); // test case sensitivity / insensitivity assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*alpha.*")), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); }); @@ -211,7 +211,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.regexpQuery("version", ".*Alpha.*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0alpha2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -234,7 +234,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.fuzzyQuery("version", "2.3.0")), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("2.1.0", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.33.0", response.getHits().getHits()[1].getSourceAsMap().get("version")); assertEquals("2.a3.0", response.getHits().getHits()[2].getSourceAsMap().get("version")); @@ -288,7 +288,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", "*Alpha*").caseInsensitive(true)), response -> { - assertEquals(2, response.getHits().getTotalHits().value); + assertEquals(2, response.getHits().getTotalHits().value()); assertEquals("1.0.0-alpha.2.1.0-rc.1", response.getHits().getHits()[0].getSourceAsMap().get("version")); assertEquals("2.1.0-alpha.beta", response.getHits().getHits()[1].getSourceAsMap().get("version")); } @@ -297,7 +297,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { private void checkWildcardQuery(String indexName, String query, String... 
expectedResults) { assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.wildcardQuery("version", query)), response -> { - assertEquals(expectedResults.length, response.getHits().getTotalHits().value); + assertEquals(expectedResults.length, response.getHits().getTotalHits().value()); for (int i = 0; i < expectedResults.length; i++) { String expected = expectedResults[i]; Object actual = response.getHits().getHits()[i].getSourceAsMap().get("version"); @@ -321,7 +321,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addDocValueField("version"), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("1.invalid.0", response.getHits().getAt(0).field("version").getValue()); @@ -359,7 +359,7 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.matchAllQuery()).addSort("version", SortOrder.ASC), response -> { - assertEquals(4, response.getHits().getTotalHits().value); + assertEquals(4, response.getHits().getTotalHits().value()); SearchHit[] hits = response.getHits().getHits(); assertEquals("2.2.0", hits[0].getSortValues()[0]); assertEquals("", hits[1].getSortValues()[0]); @@ -437,36 +437,36 @@ public class VersionStringFieldTests extends ESSingleNodeTestCase { client().admin().indices().prepareRefresh(indexName).get(); assertResponse(client().prepareSearch(indexName).addSort("version", SortOrder.ASC), response -> { - assertEquals(3, response.getHits().getTotalHits().value); + assertEquals(3, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); assertEquals("2", response.getHits().getAt(1).getId()); assertEquals("3", response.getHits().getAt(2).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "3.0.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("1", response.getHits().getAt(0).getId()); }); assertResponse(client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("version", "4.alpha.0")), response -> { - assertEquals(1, response.getHits().getTotalHits().value); + assertEquals(1, response.getHits().getTotalHits().value()); assertEquals("2", response.getHits().getAt(0).getId()); }); // range assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").to("1.5.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("1.5.0")), - response -> assertEquals(3, response.getHits().getTotalHits().value) + response -> assertEquals(3, response.getHits().getTotalHits().value()) ); assertResponse( client().prepareSearch(indexName).setQuery(QueryBuilders.rangeQuery("version").from("5.0.0").to("6.0.0")), - response -> assertEquals(1, response.getHits().getTotalHits().value) + response -> assertEquals(1, response.getHits().getTotalHits().value()) ); } } diff --git 
a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java index 8c245a4543ab..39519dc7931d 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java @@ -345,7 +345,7 @@ public class DeleteExpiredDataIT extends MlNativeAutodetectIntegTestCase { assertResponse( prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()).setFetchSource(false).setTrackTotalHits(true).setSize(10000), stateDocsResponse -> { - assertThat(stateDocsResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(5L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(5L)); int nonExistingJobDocsCount = 0; List nonExistingJobExampleIds = new ArrayList<>(); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 2e096f3262cb..9864c88d1405 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -372,7 +372,7 @@ abstract class MlNativeAutodetectIntegTestCase extends MlNativeIntegTestCase { .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) .filter(QueryBuilders.termQuery(Forecast.FORECAST_ID.getPreferredName(), forecastId)) ), - searchResponse -> count.set(searchResponse.getHits().getTotalHits().value) + searchResponse -> count.set(searchResponse.getHits().getTotalHits().value()) ); return count.get(); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java index 94bc3150cb12..5f82d996c87f 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java @@ -77,7 +77,7 @@ public class PersistJobIT extends MlNativeAutodetectIntegTestCase { ++numStateRecords; } } - assertThat(stateDocsResponse1.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse1.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } @@ -117,7 +117,7 @@ public class PersistJobIT extends MlNativeAutodetectIntegTestCase { } } - assertThat(stateDocsResponse2.getHits().getTotalHits().value, equalTo(3L)); + assertThat(stateDocsResponse2.getHits().getTotalHits().value(), equalTo(3L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(2)); @@ -154,7 +154,7 @@ public class PersistJobIT extends MlNativeAutodetectIntegTestCase { ++numStateRecords; } } - 
assertThat(stateDocsResponse.getHits().getTotalHits().value, equalTo(2L)); + assertThat(stateDocsResponse.getHits().getTotalHits().value(), equalTo(2L)); assertThat(numQuantileRecords, equalTo(1)); assertThat(numStateRecords, equalTo(1)); } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java index f5d0b23b437f..8a6499ec3bb6 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RegressionIT.java @@ -164,7 +164,7 @@ public class RegressionIT extends MlNativeDataFrameAnalyticsIntegTestCase { + testDocsWithEmptyFeatureImportance + "] test docs with empty feature importance" + " from " - + sourceData.getHits().getTotalHits().value + + sourceData.getHits().getTotalHits().value() + " hits.\n" + badDocuments, trainingDocsWithEmptyFeatureImportance + testDocsWithEmptyFeatureImportance, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java index 260a5dea0a3c..388583f6f865 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java @@ -295,7 +295,7 @@ public class RevertModelSnapshotIT extends MlNativeAutodetectIntegTestCase { prepareSearch(".ml-state*").setQuery(QueryBuilders.idsQuery().addIds(Quantiles.documentId(jobId))).setSize(1), response -> { SearchHits hits = response.getHits(); - assertThat(hits.getTotalHits().value, equalTo(1L)); + assertThat(hits.getTotalHits().value(), equalTo(1L)); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java index 8fbad7ccd387..1505d374dfa0 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/RunDataFrameAnalyticsIT.java @@ -396,7 +396,7 @@ public class RunDataFrameAnalyticsIT extends MlNativeDataFrameAnalyticsIntegTest } assertResponse(prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value == docCount) { + if (searchResponse.getHits().getTotalHits().value() == docCount) { long seenCount = SearchResponseUtils.getTotalHitsValue( prepareSearch(config.getDest().getIndex()).setTrackTotalHits(true) .setQuery(QueryBuilders.existsQuery("custom_ml.outlier_score")) @@ -404,7 +404,7 @@ public class RunDataFrameAnalyticsIT extends MlNativeDataFrameAnalyticsIntegTest logger.debug("We stopped during analysis: [{}] < [{}]", 
seenCount, docCount); assertThat(seenCount, lessThan((long) docCount)); } else { - logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value, docCount); + logger.debug("We stopped during reindexing: [{}] < [{}]", searchResponse.getHits().getTotalHits().value(), docCount); } }); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java index c15750de3b33..edc851def446 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/BucketCorrelationAggregationIT.java @@ -77,7 +77,7 @@ public class BucketCorrelationAggregationIT extends MlSingleNodeTestCase { .setSize(0) .setTrackTotalHits(true), percentilesSearch -> { - long totalHits = percentilesSearch.getHits().getTotalHits().value; + long totalHits = percentilesSearch.getHits().getTotalHits().value(); Percentiles percentiles = percentilesSearch.getAggregations().get("percentiles"); Tuple aggs = buildRangeAggAndSetExpectations( percentiles, diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java index 8fddfa47c377..139d1b074c7b 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedCcsIT.java @@ -192,7 +192,7 @@ public class DatafeedCcsIT extends AbstractMultiClustersTestCase { .setQuery(new MatchPhraseQueryBuilder("message", message)) .get(); try { - return response.getHits().getTotalHits().value > 0; + return response.getHits().getTotalHits().value() > 0; } finally { response.decRef(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java index 17fe20c5115f..dfb960794537 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlDistributedFailureIT.java @@ -767,7 +767,7 @@ public class MlDistributedFailureIT extends BaseMlIntegTestCase { prepareSearch().setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN) .setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId))), searchResponse -> { - if (searchResponse.getHits().getTotalHits().value != 1) { + if (searchResponse.getHits().getTotalHits().value() != 1) { setOnce.set(new DataCounts(jobId)); return; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index a47b67e49085..210973f2601d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -189,7 +189,7 @@ public 
class TransportGetOverallBucketsAction extends HandledTransportAction< ML_ORIGIN, searchRequest, ActionListener.wrap(searchResponse -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); if (totalHits > 0) { InternalAggregations aggregations = searchResponse.getAggregations(); Min min = aggregations.get(EARLIEST_TIME); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 6aaa1e50f2e8..d676e6cc9d06 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -415,7 +415,7 @@ public class TransportPutTrainedModelAction extends TransportMasterNodeActionwrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_MODEL_ID_AND_TAGS_UNIQUE, modelId)) ); @@ -443,7 +443,7 @@ public class TransportPutTrainedModelAction extends TransportMasterNodeActionwrap(response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { listener.onFailure( ExceptionsHelper.badRequestException(Messages.getMessage(Messages.INFERENCE_TAGS_AND_MODEL_IDS_UNIQUE, tags)) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java index 2ec460a08caf..759538b4cdc6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartDataFrameAnalyticsAction.java @@ -433,7 +433,7 @@ public class TransportStartDataFrameAnalyticsAction extends TransportMasterNodeA TransportSearchAction.TYPE, destEmptySearch, ActionListener.wrap(searchResponse -> { - if (searchResponse.getHits().getTotalHits().value > 0) { + if (searchResponse.getHits().getTotalHits().value() > 0) { listener.onFailure(ExceptionsHelper.badRequestException("dest index [{}] must be empty", destIndex)); } else { listener.onResponse(startContext); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java index f0e03a1e9497..7c41dbd46341 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/DataExtractorUtils.java @@ -62,7 +62,7 @@ public final class DataExtractorUtils { } else { Long earliestTime = toLongIfFinite((aggregations.get(EARLIEST_TIME)).value()); Long latestTime = toLongIfFinite((aggregations.get(LATEST_TIME)).value()); - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); return new DataExtractor.DataSummary(earliestTime, latestTime, totalHits); } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 20da61a3d691..7829adb39567 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -226,7 +226,7 @@ public class DatafeedConfigProvider { listener.delegateFailureAndWrap((delegate, response) -> { Set datafeedIds = new HashSet<>(); // There cannot be more than one datafeed per job - assert response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { @@ -259,7 +259,7 @@ public class DatafeedConfigProvider { listener.delegateFailureAndWrap((delegate, response) -> { Map datafeedsByJobId = new HashMap<>(); // There cannot be more than one datafeed per job - assert response.getHits().getTotalHits().value <= jobIds.size(); + assert response.getHits().getTotalHits().value() <= jobIds.size(); SearchHit[] hits = response.getHits().getHits(); for (SearchHit hit : hits) { DatafeedConfig.Builder builder = parseLenientlyFromSource(hit.getSourceRef()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index c890ab599c38..315d2249d00c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -377,7 +377,7 @@ public class DataFrameDataExtractor { SearchRequestBuilder searchRequestBuilder = buildDataSummarySearchRequestBuilder(); SearchResponse searchResponse = executeSearchRequest(searchRequestBuilder); try { - long rows = searchResponse.getHits().getTotalHits().value; + long rows = searchResponse.getHits().getTotalHits().value(); LOGGER.debug(() -> format("[%s] Data summary rows [%s]", context.jobId, rows)); return new DataSummary(rows, organicFeatures.length + processedFeatures.length); } finally { @@ -396,7 +396,7 @@ public class DataFrameDataExtractor { TransportSearchAction.TYPE, searchRequestBuilder.request(), dataSummaryActionListener.delegateFailureAndWrap( - (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value, numberOfFields)) + (l, searchResponse) -> l.onResponse(new DataSummary(searchResponse.getHits().getTotalHits().value(), numberOfFields)) ) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java index dfcc12d98be4..64cf493028ad 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/inference/InferenceRunner.java @@ -169,7 +169,7 @@ public class InferenceRunner { ); try { Max maxIncrementalIdAgg = searchResponse.getAggregations().get(DestinationIndex.INCREMENTAL_ID); - long processedTestDocCount = searchResponse.getHits().getTotalHits().value; + long processedTestDocCount = 
searchResponse.getHits().getTotalHits().value(); Long lastIncrementalId = processedTestDocCount == 0 ? null : (long) maxIncrementalIdAgg.value(); if (lastIncrementalId != null) { LOGGER.debug( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java index 482e82f9ec30..fdd4bdd120f6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/steps/InferenceStep.java @@ -115,7 +115,7 @@ public class InferenceStep extends AbstractDataFrameAnalyticsStep { ML_ORIGIN, TransportSearchAction.TYPE, searchRequest, - listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value > 0)) + listener.delegateFailureAndWrap((l, searchResponse) -> l.onResponse(searchResponse.getHits().getTotalHits().value() > 0)) ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java index 3ef2affa5d39..0b3dd573deaa 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/traintestsplit/TrainTestSplitterFactory.java @@ -70,7 +70,7 @@ public class TrainTestSplitterFactory { regression.getDependentVariable(), regression.getTrainingPercent(), regression.getRandomizeSeed(), - searchResponse.getHits().getTotalHits().value + searchResponse.getHits().getTotalHits().value() ); } finally { searchResponse.decRef(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java index f56c589aea19..c4396c4f9d2c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/ChunkedTrainedModelRestorer.java @@ -196,7 +196,7 @@ public class ChunkedTrainedModelRestorer { numDocsWritten += searchResponse.getHits().getHits().length; boolean endOfSearch = searchResponse.getHits().getHits().length < searchSize - || searchResponse.getHits().getTotalHits().value == numDocsWritten; + || searchResponse.getHits().getTotalHits().value() == numDocsWritten; if (endOfSearch) { successConsumer.accept(Boolean.TRUE); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index f493c735d87e..ff5f37427b18 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -1008,7 +1008,7 @@ public class TrainedModelProvider { ML_ORIGIN, searchRequest, ActionListener.wrap(response -> { - long totalHitCount = response.getHits().getTotalHits().value + foundResourceIds.size(); + long totalHitCount = response.getHits().getTotalHits().value() + foundResourceIds.size(); Set 
foundFromDocs = new HashSet<>(); for (SearchHit hit : response.getHits().getHits()) { Map docSource = hit.getSourceAsMap(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java index 8493513f40bd..df9a187f5961 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobConfigProvider.java @@ -683,7 +683,7 @@ public class JobConfigProvider { ML_ORIGIN, searchRequest, ActionListener.wrap( - response -> listener.onResponse(response.getHits().getTotalHits().value > 0), + response -> listener.onResponse(response.getHits().getTotalHits().value() > 0), listener::onFailure ), client::search diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java index b9cc1902b7ab..0f3abe3ab8c2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobDataDeleter.java @@ -352,7 +352,7 @@ public class JobDataDeleter { } } SearchResponse searchResponse = item.getResponse(); - if (searchResponse.getHits().getTotalHits().value > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { + if (searchResponse.getHits().getTotalHits().value() > 0 || indexNames.get()[i].equals(defaultSharedIndex)) { needToRunDBQTemp = true; } else { indicesToDelete.add(indexNames.get()[i]); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index f9e4e62e4e3b..51b3e0b55d75 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -870,7 +870,7 @@ public class JobResultsProvider { throw QueryPage.emptyQueryPage(Bucket.RESULTS_FIELD); } - QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, Bucket.RESULTS_FIELD); + QueryPage buckets = new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), Bucket.RESULTS_FIELD); if (query.isExpand()) { Iterator bucketsToExpand = buckets.results() @@ -1086,7 +1086,7 @@ public class JobResultsProvider { } QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), CategoryDefinition.RESULTS_FIELD ); handler.accept(result); @@ -1143,7 +1143,7 @@ public class JobResultsProvider { } QueryPage queryPage = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), AnomalyRecord.RESULTS_FIELD ); handler.accept(queryPage); @@ -1207,7 +1207,7 @@ public class JobResultsProvider { } QueryPage result = new QueryPage<>( influencers, - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Influencer.RESULTS_FIELD ); handler.accept(result); @@ -1375,7 +1375,7 @@ public class JobResultsProvider { QueryPage result = new QueryPage<>( results, - searchResponse.getHits().getTotalHits().value, + 
searchResponse.getHits().getTotalHits().value(), ModelSnapshot.RESULTS_FIELD ); handler.accept(result); @@ -1411,7 +1411,7 @@ public class JobResultsProvider { } } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1444,7 +1444,7 @@ public class JobResultsProvider { } } - return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value, ModelPlot.RESULTS_FIELD); + return new QueryPage<>(results, searchResponse.getHits().getTotalHits().value(), ModelPlot.RESULTS_FIELD); } finally { searchResponse.decRef(); } @@ -1700,7 +1700,7 @@ public class JobResultsProvider { event.eventId(hit.getId()); events.add(event.build()); } - handler.onResponse(new QueryPage<>(events, response.getHits().getTotalHits().value, ScheduledEvent.RESULTS_FIELD)); + handler.onResponse(new QueryPage<>(events, response.getHits().getTotalHits().value(), ScheduledEvent.RESULTS_FIELD)); } catch (Exception e) { handler.onFailure(e); } @@ -1901,7 +1901,7 @@ public class JobResultsProvider { for (SearchHit hit : hits) { calendars.add(MlParserUtils.parse(hit, Calendar.LENIENT_PARSER).build()); } - listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value, Calendar.RESULTS_FIELD)); + listener.onResponse(new QueryPage<>(calendars, response.getHits().getTotalHits().value(), Calendar.RESULTS_FIELD)); } catch (Exception e) { listener.onFailure(e); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java index 886c19a65a4d..194759c026a3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/retention/ExpiredForecastsRemover.java @@ -168,7 +168,7 @@ public class ExpiredForecastsRemover implements MlDataRemover { List forecastsToDelete = new ArrayList<>(); SearchHits hits = searchResponse.getHits(); - if (hits.getTotalHits().value > MAX_FORECASTS) { + if (hits.getTotalHits().value() > MAX_FORECASTS) { LOGGER.info("More than [{}] forecasts were found. 
This run will only delete [{}] of them", MAX_FORECASTS, MAX_FORECASTS); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java index 86488a647baa..ef6087f021e9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/BatchedDocumentsIterator.java @@ -111,7 +111,7 @@ public abstract class BatchedDocumentsIterator implements BatchedIterator ); SearchResponse searchResponse = client.search(searchRequest).actionGet(); - totalHits = searchResponse.getHits().getTotalHits().value; + totalHits = searchResponse.getHits().getTotalHits().value(); scrollId = searchResponse.getScrollId(); return searchResponse; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java index f63f6e054917..802bcaf3b342 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/SearchAfterDocumentsIterator.java @@ -110,7 +110,7 @@ public abstract class SearchAfterDocumentsIterator implements BatchedIterator SearchResponse searchResponse = doSearch(searchAfterFields()); try { if (trackTotalHits && totalHits.get() == 0) { - totalHits.set(searchResponse.getHits().getTotalHits().value); + totalHits.set(searchResponse.getHits().getTotalHits().value()); } return mapHits(searchResponse); } finally { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java index 3d17d8dd23ff..13cf6d87728a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/SparseVectorQueryBuilderTests.java @@ -166,8 +166,8 @@ public class SparseVectorQueryBuilderTests extends AbstractQueryTestCase boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index 8da6fc843614..00d50e0d0d7b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -139,8 +139,8 @@ public class TextExpansionQueryBuilderTests extends AbstractQueryTestCase boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); for (var clause : booleanQuery.clauses()) { - 
assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); - assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + assertEquals(BooleanClause.Occur.SHOULD, clause.occur()); + assertThat(clause.query(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); } } diff --git a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java index daea70abd29e..7ddaa53a5991 100644 --- a/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java +++ b/x-pack/plugin/monitoring/src/internalClusterTest/java/org/elasticsearch/xpack/monitoring/integration/MonitoringIT.java @@ -149,7 +149,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { assertResponse(client().prepareSearch(".monitoring-" + system.getSystem() + "-" + TEMPLATE_VERSION + "-*"), response -> { // exactly 3 results are expected - assertThat("No monitoring documents yet", response.getHits().getTotalHits().value, equalTo(3L)); + assertThat("No monitoring documents yet", response.getHits().getTotalHits().value(), equalTo(3L)); final List> sources = Arrays.stream(response.getHits().getHits()) .map(SearchHit::getSourceAsMap) @@ -165,7 +165,7 @@ public class MonitoringIT extends ESSingleNodeTestCase { assertCheckedResponse(client().prepareSearch(monitoringIndex), response -> { final SearchHits hits = response.getHits(); - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat( "Monitoring documents must have the same timestamp", Arrays.stream(hits.getHits()).map(hit -> extractValue("timestamp", hit.getSourceAsMap())).distinct().count(), diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java index 93e055b58ddc..d68395ef7656 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterIntegTests.java @@ -113,7 +113,7 @@ public class LocalExporterIntegTests extends LocalExporterIntegTestCase { assertResponse( prepareSearch(".monitoring-*"), - response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value)) + response -> assertThat((long) nbDocs, lessThanOrEqualTo(response.getHits().getTotalHits().value())) ); }); @@ -260,7 +260,7 @@ public class LocalExporterIntegTests extends LocalExporterIntegTestCase { DateFormatter dateFormatter = DateFormatter.forPattern(customTimeFormat).withZone(ZoneOffset.UTC); assertResponse(prepareSearch(".monitoring-*").setSize(100), rsp -> { - assertThat(rsp.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(rsp.getHits().getTotalHits().value(), greaterThan(0L)); for (SearchHit hit : rsp.getHits().getHits()) { final Map source = hit.getSourceAsMap(); diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java 
b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java index d6e15ea25c8e..d382905c1c9c 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporterResourceIntegTests.java @@ -293,12 +293,14 @@ public class LocalExporterResourceIntegTests extends LocalExporterIntegTestCase .query(QueryBuilders.matchQuery("metadata.xpack.cluster_uuid", clusterUUID)); assertResponse(prepareSearch(".watches").setSource(searchSource), response -> { - if (response.getHits().getTotalHits().value > 0) { + if (response.getHits().getTotalHits().value() > 0) { List invalidWatches = new ArrayList<>(); for (SearchHit hit : response.getHits().getHits()) { invalidWatches.add(ObjectPath.eval("metadata.xpack.watch", hit.getSourceAsMap())); } - fail("Found [" + response.getHits().getTotalHits().value + "] invalid watches when none were expected: " + invalidWatches); + fail( + "Found [" + response.getHits().getTotalHits().value() + "] invalid watches when none were expected: " + invalidWatches + ); } }); } diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java index 803c7f410c41..71f788727aa2 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/AbstractArchiveTestCase.java @@ -97,7 +97,7 @@ public abstract class AbstractArchiveTestCase extends AbstractSnapshotIntegTestC .getAsVersionId( "version", IndexVersion::fromId, - IndexVersion.fromId(randomBoolean() ? 
5000099 : 6000099) + IndexVersion.fromId(randomFrom(5000099, 6000099, 7000099)) ) ) ) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java index d1455eaa2f1c..18adebb145f9 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldSegmentInfos.java @@ -196,7 +196,7 @@ public class OldSegmentInfos implements Cloneable, Iterable { long generation = generationFromSegmentsFileName(segmentFileName); // System.out.println(Thread.currentThread() + ": SegmentInfos.readCommit " + segmentFileName); - try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName, IOContext.READONCE)) { + try (ChecksumIndexInput input = directory.openChecksumInput(segmentFileName)) { try { return readCommit(directory, input, generation, minSupportedMajorVersion); } catch (EOFException | NoSuchFileException | FileNotFoundException e) { @@ -305,7 +305,7 @@ public class OldSegmentInfos implements Cloneable, Iterable { byte[] segmentID = new byte[StringHelper.ID_LENGTH]; input.readBytes(segmentID, 0, segmentID.length); Codec codec = readCodec(input); - SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.READ); + SegmentInfo info = codec.segmentInfoFormat().read(directory, segName, segmentID, IOContext.DEFAULT); info.setCodec(codec); totalDocs += info.maxDoc(); long delGen = CodecUtil.readBELong(input); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java index 25b4b685ac50..3ed8fc26ac93 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/BWCCodec.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.lucene.bwc.codecs; -import org.apache.lucene.backward_codecs.lucene70.Lucene70Codec; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.codecs.FieldsConsumer; @@ -27,7 +26,6 @@ import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.index.Terms; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; -import org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec; import java.io.IOException; import java.util.ArrayList; @@ -101,6 +99,7 @@ public abstract class BWCCodec extends Codec { false, fieldInfo.getIndexOptions(), fieldInfo.getDocValuesType(), + fieldInfo.docValuesSkipIndexType(), fieldInfo.getDocValuesGen(), fieldInfo.attributes(), fieldInfo.getPointDimensionCount(), @@ -119,9 +118,7 @@ public abstract class BWCCodec extends Codec { } public static SegmentInfo wrap(SegmentInfo segmentInfo) { - // special handling for Lucene70Codec (which is currently bundled with Lucene) - // Use BWCLucene70Codec instead as that one extends BWCCodec (similar to all other older codecs) - final Codec codec = segmentInfo.getCodec() instanceof Lucene70Codec ? 
new BWCLucene70Codec() : segmentInfo.getCodec(); + final Codec codec = segmentInfo.getCodec(); final SegmentInfo segmentInfo1 = new SegmentInfo( segmentInfo.dir, // Use Version.LATEST instead of original version, otherwise SegmentCommitInfo will bark when processing (N-1 limitation) diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java index 5a9b1bb25230..c7abed7d69a5 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacyDocValuesIterables.java @@ -182,10 +182,7 @@ public class LegacyDocValuesIterables { try { if (nextDocID > values.docID()) { if (values.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { - ordCount = 0; - while (values.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - ordCount++; - } + ordCount = values.docValueCount(); } } int result; @@ -225,6 +222,7 @@ public class LegacyDocValuesIterables { return new Iterator() { private boolean nextIsSet; + private int currentIndex = 0; private long nextOrd; private void setNext() { @@ -232,17 +230,22 @@ public class LegacyDocValuesIterables { if (nextIsSet == false) { if (values.docID() == -1) { values.nextDoc(); + currentIndex = 0; } while (true) { if (values.docID() == DocIdSetIterator.NO_MORE_DOCS) { nextOrd = -1; break; } - nextOrd = values.nextOrd(); - if (nextOrd != -1) { - break; + if (currentIndex < values.docValueCount()) { + nextOrd = values.nextOrd(); + currentIndex++; + if (nextOrd != -1) { + break; + } } values.nextDoc(); + currentIndex = 0; } nextIsSet = true; } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java index 21b6818bd561..80236f3847e1 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/index/LegacySortedSetDocValuesWrapper.java @@ -53,7 +53,7 @@ public final class LegacySortedSetDocValuesWrapper extends SortedSetDocValues { while (docID < maxDoc) { values.setDocument(docID); ord = values.nextOrd(); - if (ord != NO_MORE_ORDS) { + if (ord != LegacySortedSetDocValues.NO_MORE_ORDS) { return docID; } docID++; @@ -81,7 +81,7 @@ public final class LegacySortedSetDocValuesWrapper extends SortedSetDocValues { docID = target; values.setDocument(docID); ord = values.nextOrd(); - return ord != NO_MORE_ORDS; + return ord != LegacySortedSetDocValues.NO_MORE_ORDS; } @Override @@ -92,7 +92,7 @@ public final class LegacySortedSetDocValuesWrapper extends SortedSetDocValues { @Override public long nextOrd() { long result = ord; - if (result != NO_MORE_ORDS) { + if (result != LegacySortedSetDocValues.NO_MORE_ORDS) { ord = values.nextOrd(); } return result; diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java index a567f2586940..007b398624d5 
100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/ForUtil.java @@ -105,14 +105,14 @@ final class ForUtil { for (int bpv = 1; bpv <= 32; ++bpv) { final FormatAndBits formatAndBits = PackedInts.fastestFormatAndBits(BLOCK_SIZE, bpv, acceptableOverheadRatio); - assert formatAndBits.format.isSupported(formatAndBits.bitsPerValue); - assert formatAndBits.bitsPerValue <= 32; - encodedSizes[bpv] = encodedSize(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - encoders[bpv] = PackedInts.getEncoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); - decoders[bpv] = PackedInts.getDecoder(formatAndBits.format, PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue); + assert formatAndBits.format().isSupported(formatAndBits.bitsPerValue()); + assert formatAndBits.bitsPerValue() <= 32; + encodedSizes[bpv] = encodedSize(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + encoders[bpv] = PackedInts.getEncoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); + decoders[bpv] = PackedInts.getDecoder(formatAndBits.format(), PackedInts.VERSION_CURRENT, formatAndBits.bitsPerValue()); iterations[bpv] = computeIterations(decoders[bpv]); - out.writeVInt(formatAndBits.format.getId() << 5 | (formatAndBits.bitsPerValue - 1)); + out.writeVInt(formatAndBits.format().getId() << 5 | (formatAndBits.bitsPerValue() - 1)); } } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java index 83fcb1744910..06002d2d10de 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/Lucene50FieldInfosFormat.java @@ -23,6 +23,7 @@ import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; import org.apache.lucene.codecs.CodecUtil; import org.apache.lucene.codecs.FieldInfosFormat; import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValuesSkipIndexType; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; @@ -103,6 +104,7 @@ public final class Lucene50FieldInfosFormat extends FieldInfosFormat { storePayloads, indexOptions, docValuesType, + DocValuesSkipIndexType.NONE, dvGen, attributes, 0, diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java index 09147e821d9f..607d9903abc8 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesProducer.java @@ -28,6 +28,7 @@ import org.apache.lucene.index.BaseTermsEnum; import org.apache.lucene.index.BinaryDocValues; import 
org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.ImpactsEnum; @@ -1316,6 +1317,11 @@ final class Lucene54DocValuesProducer extends DocValuesProducer implements Close } } + @Override + public DocValuesSkipper getSkipper(FieldInfo field) throws IOException { + return null; + } + private SortedSetDocValues getSortedSetWithAddresses(FieldInfo field) throws IOException { final long valueCount = binaries.get(field.name).count; // we keep the byte[]s and list of ords on disk, these could be large diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java index f3ce3ea0755e..43203caf571f 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene60/MetadataOnlyBKDReader.java @@ -63,14 +63,14 @@ public class MetadataOnlyBKDReader extends PointValues { numLeaves = metaIn.readVInt(); assert numLeaves > 0; - minPackedValue = new byte[config.packedIndexBytesLength]; - maxPackedValue = new byte[config.packedIndexBytesLength]; + minPackedValue = new byte[config.packedIndexBytesLength()]; + maxPackedValue = new byte[config.packedIndexBytesLength()]; - metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength); - metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength); - final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim); - for (int dim = 0; dim < config.numIndexDims; dim++) { - if (comparator.compare(minPackedValue, dim * config.bytesPerDim, maxPackedValue, dim * config.bytesPerDim) > 0) { + metaIn.readBytes(minPackedValue, 0, config.packedIndexBytesLength()); + metaIn.readBytes(maxPackedValue, 0, config.packedIndexBytesLength()); + final ArrayUtil.ByteArrayComparator comparator = ArrayUtil.getUnsignedComparator(config.bytesPerDim()); + for (int dim = 0; dim < config.numIndexDims(); dim++) { + if (comparator.compare(minPackedValue, dim * config.bytesPerDim(), maxPackedValue, dim * config.bytesPerDim()) > 0) { throw new CorruptIndexException( "minPackedValue " + new BytesRef(minPackedValue) @@ -104,17 +104,17 @@ public class MetadataOnlyBKDReader extends PointValues { @Override public int getNumDimensions() { - return config.numDims; + return config.numDims(); } @Override public int getNumIndexDimensions() { - return config.numIndexDims; + return config.numIndexDims(); } @Override public int getBytesPerDimension() { - return config.bytesPerDim; + return config.bytesPerDim(); } @Override diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java index 0e689138acd8..0100a8bd1463 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/BWCLucene70Codec.java @@ -32,7 +32,7 @@ public 
class BWCLucene70Codec extends BWCCodec { private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat(); private final CompoundFormat compoundFormat = new Lucene50CompoundFormat(); private final StoredFieldsFormat storedFieldsFormat; - private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70"); + private final DocValuesFormat defaultDVFormat = new Lucene70DocValuesFormat(); private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() { @Override public DocValuesFormat getDocValuesFormatForField(String field) { @@ -47,7 +47,11 @@ public class BWCLucene70Codec extends BWCCodec { }; public BWCLucene70Codec() { - super("BWCLucene70Codec"); + this("BWCLucene70Codec"); + } + + protected BWCLucene70Codec(String name) { + super(name); storedFieldsFormat = new Lucene50StoredFieldsFormat(Lucene50StoredFieldsFormat.Mode.BEST_SPEED); } diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java new file mode 100644 index 000000000000..75119247cdb1 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/IndexedDISI.java @@ -0,0 +1,327 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BitSetIterator; +import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.RoaringDocIdSet; + +import java.io.DataInput; +import java.io.IOException; + +/** + * Disk-based implementation of a {@link DocIdSetIterator} which can return the index of the current + * document, i.e. the ordinal of the current document among the list of documents that this iterator + * can return. This is useful to implement sparse doc values by only having to encode values for + * documents that actually have a value. + * + *

+ * <p>Implementation-wise, this {@link DocIdSetIterator} is inspired of {@link RoaringDocIdSet
+ * roaring bitmaps} and encodes ranges of {@code 65536} documents independently and picks between 3
+ * encodings depending on the density of the range:
+ *
+ * <ul>
+ *   <li>{@code ALL} if the range contains 65536 documents exactly,
+ *   <li>{@code DENSE} if the range contains 4096 documents or more; in that case documents are
+ *       stored in a bit set,
+ *   <li>{@code SPARSE} otherwise, and the lower 16 bits of the doc IDs are stored in a {@link
+ *       DataInput#readShort() short}.
+ * </ul>
+ *
+ * <p>Only ranges that contain at least one value are encoded.
+ *
+ * <p>
    This implementation uses 6 bytes per document in the worst-case, which happens in the case + * that all ranges contain exactly one document. + */ +final class IndexedDISI extends DocIdSetIterator { + + static final int MAX_ARRAY_LENGTH = (1 << 12) - 1; + + private static void flush(int block, FixedBitSet buffer, int cardinality, IndexOutput out) throws IOException { + assert block >= 0 && block < 65536; + out.writeShort((short) block); + assert cardinality > 0 && cardinality <= 65536; + out.writeShort((short) (cardinality - 1)); + if (cardinality > MAX_ARRAY_LENGTH) { + if (cardinality != 65536) { // all docs are set + for (long word : buffer.getBits()) { + out.writeLong(word); + } + } + } else { + BitSetIterator it = new BitSetIterator(buffer, cardinality); + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + out.writeShort((short) doc); + } + } + } + + static void writeBitSet(DocIdSetIterator it, IndexOutput out) throws IOException { + int i = 0; + final FixedBitSet buffer = new FixedBitSet(1 << 16); + int prevBlock = -1; + for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) { + final int block = doc >>> 16; + if (prevBlock != -1 && block != prevBlock) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + prevBlock = block; + i = 0; + } + buffer.set(doc & 0xFFFF); + i++; + prevBlock = block; + } + if (i > 0) { + flush(prevBlock, buffer, i, out); + buffer.clear(0, buffer.length()); + } + // NO_MORE_DOCS is stored explicitly + buffer.set(DocIdSetIterator.NO_MORE_DOCS & 0xFFFF); + flush(DocIdSetIterator.NO_MORE_DOCS >>> 16, buffer, 1, out); + } + + /** The slice that stores the {@link DocIdSetIterator}. */ + private final IndexInput slice; + + private final long cost; + + IndexedDISI(IndexInput in, long offset, long length, long cost) throws IOException { + this(in.slice("docs", offset, length), cost); + } + + // This constructor allows to pass the slice directly in case it helps reuse + // see eg. 
Lucene70 norms producer's merge instance + IndexedDISI(IndexInput slice, long cost) throws IOException { + this.slice = slice; + this.cost = cost; + } + + private int block = -1; + private long blockEnd; + private int nextBlockIndex = -1; + Method method; + + private int doc = -1; + private int index = -1; + + // SPARSE variables + boolean exists; + + // DENSE variables + private long word; + private int wordIndex = -1; + // number of one bits encountered so far, including those of `word` + private int numberOfOnes; + + // ALL variables + private int gap; + + @Override + public int docID() { + return doc; + } + + @Override + public int advance(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + if (block == targetBlock) { + if (method.advanceWithinBlock(this, target)) { + return doc; + } + readBlockHeader(); + } + boolean found = method.advanceWithinBlock(this, block); + assert found; + return doc; + } + + public boolean advanceExact(int target) throws IOException { + final int targetBlock = target & 0xFFFF0000; + if (block < targetBlock) { + advanceBlock(targetBlock); + } + boolean found = block == targetBlock && method.advanceExactWithinBlock(this, target); + this.doc = target; + return found; + } + + private void advanceBlock(int targetBlock) throws IOException { + do { + slice.seek(blockEnd); + readBlockHeader(); + } while (block < targetBlock); + } + + private void readBlockHeader() throws IOException { + block = Short.toUnsignedInt(slice.readShort()) << 16; + assert block >= 0; + final int numValues = 1 + Short.toUnsignedInt(slice.readShort()); + index = nextBlockIndex; + nextBlockIndex = index + numValues; + if (numValues <= MAX_ARRAY_LENGTH) { + method = Method.SPARSE; + blockEnd = slice.getFilePointer() + (numValues << 1); + } else if (numValues == 65536) { + method = Method.ALL; + blockEnd = slice.getFilePointer(); + gap = block - index - 1; + } else { + method = Method.DENSE; + blockEnd = slice.getFilePointer() + (1 << 13); + wordIndex = -1; + numberOfOnes = index + 1; + } + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + public int index() { + return index; + } + + @Override + public long cost() { + return cost; + } + + enum Method { + SPARSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + disi.doc = disi.block | doc; + disi.exists = true; + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + // TODO: binary search + if (target == disi.doc) { + return disi.exists; + } + for (; disi.index < disi.nextBlockIndex;) { + int doc = Short.toUnsignedInt(disi.slice.readShort()); + disi.index++; + if (doc >= targetInBlock) { + if (doc != targetInBlock) { + disi.index--; + disi.slice.seek(disi.slice.getFilePointer() - Short.BYTES); + break; + } + disi.exists = true; + return true; + } + } + disi.exists = false; + return false; + } + }, + DENSE { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex 
+ 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex = targetWordIndex; + + long leftBits = disi.word >>> target; + if (leftBits != 0L) { + disi.doc = target + Long.numberOfTrailingZeros(leftBits); + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return true; + } + + while (++disi.wordIndex < 1024) { + disi.word = disi.slice.readLong(); + if (disi.word != 0) { + disi.index = disi.numberOfOnes; + disi.numberOfOnes += Long.bitCount(disi.word); + disi.doc = disi.block | (disi.wordIndex << 6) | Long.numberOfTrailingZeros(disi.word); + return true; + } + } + return false; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + final int targetInBlock = target & 0xFFFF; + final int targetWordIndex = targetInBlock >>> 6; + for (int i = disi.wordIndex + 1; i <= targetWordIndex; ++i) { + disi.word = disi.slice.readLong(); + disi.numberOfOnes += Long.bitCount(disi.word); + } + disi.wordIndex = targetWordIndex; + + long leftBits = disi.word >>> target; + disi.index = disi.numberOfOnes - Long.bitCount(leftBits); + return (leftBits & 1L) != 0; + } + }, + ALL { + @Override + boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.doc = target; + disi.index = target - disi.gap; + return true; + } + + @Override + boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException { + disi.index = target - disi.gap; + return true; + } + }; + + /** + * Advance to the first doc from the block that is equal to or greater than {@code target}. + * Return true if there is such a doc and false otherwise. + */ + abstract boolean advanceWithinBlock(IndexedDISI disi, int target) throws IOException; + + /** + * Advance the iterator exactly to the position corresponding to the given {@code target} and + * return whether this document exists. + */ + abstract boolean advanceExactWithinBlock(IndexedDISI disi, int target) throws IOException; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java new file mode 100644 index 000000000000..77de24b53069 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70Codec.java @@ -0,0 +1,15 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +public class Lucene70Codec extends BWCLucene70Codec { + + public Lucene70Codec() { + super("Lucene70"); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java new file mode 100644 index 000000000000..1d35a60235d3 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesConsumer.java @@ -0,0 +1,681 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. + */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicWriter; +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.EmptyDocValuesProducer; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.store.ByteBuffersIndexOutput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; +import org.apache.lucene.util.MathUtil; +import org.apache.lucene.util.StringHelper; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.DIRECT_MONOTONIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SHIFT; +import static org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat.NUMERIC_BLOCK_SIZE; + +/** writer for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesConsumer 
extends DocValuesConsumer { + + IndexOutput data, meta; + final int maxDoc; + + /** expert: Creates a new writer */ + Lucene70DocValuesConsumer(SegmentWriteState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + boolean success = false; + try { + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + data = EndiannessReverserUtil.createOutput(state.directory, dataName, state.context); + CodecUtil.writeIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + meta = EndiannessReverserUtil.createOutput(state.directory, metaName, state.context); + CodecUtil.writeIndexHeader( + meta, + metaCodec, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + maxDoc = state.segmentInfo.maxDoc(); + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this); + } + } + } + + @Override + public void close() throws IOException { + boolean success = false; + try { + if (meta != null) { + meta.writeInt(-1); // write EOF marker + CodecUtil.writeFooter(meta); // write checksum + } + if (data != null) { + CodecUtil.writeFooter(data); // write checksum + } + success = true; + } finally { + if (success) { + IOUtils.close(data, meta); + } else { + IOUtils.closeWhileHandlingException(data, meta); + } + meta = data = null; + } + } + + @Override + public void addNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.NUMERIC); + + writeValues(field, new EmptyDocValuesProducer() { + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + return DocValues.singleton(valuesProducer.getNumeric(field)); + } + }); + } + + private static class MinMaxTracker { + long min, max, numValues, spaceInBits; + + MinMaxTracker() { + reset(); + spaceInBits = 0; + } + + private void reset() { + min = Long.MAX_VALUE; + max = Long.MIN_VALUE; + numValues = 0; + } + + /** Accumulate a new value. */ + void update(long v) { + min = Math.min(min, v); + max = Math.max(max, v); + ++numValues; + } + + /** Update the required space. */ + void finish() { + if (max > min) { + spaceInBits += LegacyDirectWriter.unsignedBitsRequired(max - min) * numValues; + } + } + + /** Update space usage and get ready for accumulating values for the next block. */ + void nextBlock() { + finish(); + reset(); + } + } + + private long[] writeValues(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + int numDocsWithValue = 0; + MinMaxTracker minMax = new MinMaxTracker(); + MinMaxTracker blockMinMax = new MinMaxTracker(); + long gcd = 0; + Set uniqueValues = new HashSet<>(); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + + if (gcd != 1) { + if (v < Long.MIN_VALUE / 2 || v > Long.MAX_VALUE / 2) { + // in that case v - minValue might overflow and make the GCD computation return + // wrong results. 
Since these extreme values are unlikely, we just discard + // GCD computation for them + gcd = 1; + } else if (minMax.numValues != 0) { // minValue needs to be set first + gcd = MathUtil.gcd(gcd, v - minMax.min); + } + } + + minMax.update(v); + blockMinMax.update(v); + if (blockMinMax.numValues == NUMERIC_BLOCK_SIZE) { + blockMinMax.nextBlock(); + } + + if (uniqueValues != null && uniqueValues.add(v) && uniqueValues.size() > 256) { + uniqueValues = null; + } + } + + numDocsWithValue++; + } + + minMax.finish(); + blockMinMax.finish(); + + final long numValues = minMax.numValues; + long min = minMax.min; + final long max = minMax.max; + assert blockMinMax.spaceInBits <= minMax.spaceInBits; + + if (numDocsWithValue == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithValue == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedNumeric(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeLong(numValues); + final int numBitsPerValue; + boolean doBlocks = false; + Map encode = null; + if (min >= max) { + numBitsPerValue = 0; + meta.writeInt(-1); + } else { + if (uniqueValues != null + && uniqueValues.size() > 1 + && LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1) < LegacyDirectWriter.unsignedBitsRequired( + (max - min) / gcd + )) { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired(uniqueValues.size() - 1); + final Long[] sortedUniqueValues = uniqueValues.toArray(new Long[0]); + Arrays.sort(sortedUniqueValues); + meta.writeInt(sortedUniqueValues.length); + for (Long v : sortedUniqueValues) { + meta.writeLong(v); + } + encode = new HashMap<>(); + for (int i = 0; i < sortedUniqueValues.length; ++i) { + encode.put(sortedUniqueValues[i], i); + } + min = 0; + gcd = 1; + } else { + uniqueValues = null; + // we do blocks if that appears to save 10+% storage + doBlocks = minMax.spaceInBits > 0 && (double) blockMinMax.spaceInBits / minMax.spaceInBits <= 0.9; + if (doBlocks) { + numBitsPerValue = 0xFF; + meta.writeInt(-2 - NUMERIC_BLOCK_SHIFT); + } else { + numBitsPerValue = LegacyDirectWriter.unsignedBitsRequired((max - min) / gcd); + if (gcd == 1 + && min > 0 + && LegacyDirectWriter.unsignedBitsRequired(max) == LegacyDirectWriter.unsignedBitsRequired(max - min)) { + min = 0; + } + meta.writeInt(-1); + } + } + } + + meta.writeByte((byte) numBitsPerValue); + meta.writeLong(min); + meta.writeLong(gcd); + long startOffset = data.getFilePointer(); + meta.writeLong(startOffset); + if (doBlocks) { + writeValuesMultipleBlocks(valuesProducer.getSortedNumeric(field), gcd); + } else if (numBitsPerValue != 0) { + writeValuesSingleBlock(valuesProducer.getSortedNumeric(field), numValues, numBitsPerValue, min, gcd, encode); + } + meta.writeLong(data.getFilePointer() - startOffset); + + return new long[] { numDocsWithValue, numValues }; + } + + private void writeValuesSingleBlock( + SortedNumericDocValues values, + long numValues, + int numBitsPerValue, + long min, + long gcd, + Map encode + ) throws IOException { + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numValues, numBitsPerValue); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + long v = values.nextValue(); + if (encode == null) { + writer.add((v - min) / gcd); + } else { + writer.add(encode.get(v)); + 
} + } + } + writer.finish(); + } + + private void writeValuesMultipleBlocks(SortedNumericDocValues values, long gcd) throws IOException { + final long[] buffer = new long[NUMERIC_BLOCK_SIZE]; + final ByteBuffersDataOutput encodeBuffer = ByteBuffersDataOutput.newResettableInstance(); + int upTo = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + buffer[upTo++] = values.nextValue(); + if (upTo == NUMERIC_BLOCK_SIZE) { + writeBlock(buffer, NUMERIC_BLOCK_SIZE, gcd, encodeBuffer); + upTo = 0; + } + } + } + if (upTo > 0) { + writeBlock(buffer, upTo, gcd, encodeBuffer); + } + } + + private void writeBlock(long[] values, int length, long gcd, ByteBuffersDataOutput buffer) throws IOException { + assert length > 0; + long min = values[0]; + long max = values[0]; + for (int i = 1; i < length; ++i) { + final long v = values[i]; + assert Math.floorMod(values[i] - min, gcd) == 0; + min = Math.min(min, v); + max = Math.max(max, v); + } + if (min == max) { + data.writeByte((byte) 0); + data.writeLong(min); + } else { + final int bitsPerValue = LegacyDirectWriter.unsignedBitsRequired(max - min); + buffer.reset(); + assert buffer.size() == 0; + final LegacyDirectWriter w = LegacyDirectWriter.getInstance(buffer, length, bitsPerValue); + for (int i = 0; i < length; ++i) { + w.add((values[i] - min) / gcd); + } + w.finish(); + data.writeByte((byte) bitsPerValue); + data.writeLong(min); + data.writeInt(Math.toIntExact(buffer.size())); + buffer.copyTo(data); + } + } + + @Override + public void addBinaryField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.BINARY); + + BinaryDocValues values = valuesProducer.getBinary(field); + long start = data.getFilePointer(); + meta.writeLong(start); + int numDocsWithField = 0; + int minLength = Integer.MAX_VALUE; + int maxLength = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + BytesRef v = values.binaryValue(); + int length = v.length; + data.writeBytes(v.bytes, v.offset, v.length); + minLength = Math.min(length, minLength); + maxLength = Math.max(length, maxLength); + } + assert numDocsWithField <= maxDoc; + meta.writeLong(data.getFilePointer() - start); + + if (numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getBinary(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + meta.writeInt(minLength); + meta.writeInt(maxLength); + if (maxLength > minLength) { + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + writer.add(addr); + values = valuesProducer.getBinary(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.binaryValue().length; + writer.add(addr); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedField(FieldInfo field, 
DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED); + doAddSortedField(field, valuesProducer); + } + + private void doAddSortedField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + SortedDocValues values = valuesProducer.getSorted(field); + int numDocsWithField = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; + } + + if (numDocsWithField == 0) { + meta.writeLong(-2); + meta.writeLong(0L); + } else if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSorted(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + meta.writeInt(numDocsWithField); + if (values.getValueCount() <= 1) { + meta.writeByte((byte) 0); + meta.writeLong(0L); + meta.writeLong(0L); + } else { + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numDocsWithField, numberOfBitsPerOrd); + values = valuesProducer.getSorted(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + writer.add(values.ordValue()); + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + } + + addTermsDict(DocValues.singleton(valuesProducer.getSorted(field))); + } + + private void addTermsDict(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeVLong(size); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT); + + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + meta.writeInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + long numBlocks = (size + Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) >>> Lucene70DocValuesFormat.TERMS_DICT_BLOCK_SHIFT; + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + BytesRefBuilder previous = new BytesRefBuilder(); + long ord = 0; + long start = data.getFilePointer(); + int maxLength = 0; + TermsEnum iterator = values.termsEnum(); + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_BLOCK_MASK) == 0) { + writer.add(data.getFilePointer() - start); + data.writeVInt(term.length); + data.writeBytes(term.bytes, term.offset, term.length); + } else { + final int prefixLength = StringHelper.bytesDifference(previous.get(), term); + final int suffixLength = term.length - prefixLength; + assert suffixLength > 0; // terms are unique + + data.writeByte((byte) (Math.min(prefixLength, 15) | (Math.min(15, suffixLength - 1) << 4))); + if (prefixLength >= 15) { + data.writeVInt(prefixLength - 15); + } + if (suffixLength >= 16) { + data.writeVInt(suffixLength - 16); + } + data.writeBytes(term.bytes, term.offset + prefixLength, term.length - prefixLength); + } + maxLength = Math.max(maxLength, term.length); + previous.copyBytes(term); + ++ord; + } + writer.finish(); + meta.writeInt(maxLength); + meta.writeLong(start); + 
meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + + // Now write the reverse terms index + writeTermsIndex(values); + } + + private void writeTermsIndex(SortedSetDocValues values) throws IOException { + final long size = values.getValueCount(); + meta.writeInt(Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + long start = data.getFilePointer(); + + long numBlocks = 1L + ((size + Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) + >>> Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_SHIFT); + ByteBuffersDataOutput addressBuffer = new ByteBuffersDataOutput(); + ByteBuffersIndexOutput addressIndexOut = new ByteBuffersIndexOutput(addressBuffer, "temp", "temp"); + LegacyDirectMonotonicWriter writer = LegacyDirectMonotonicWriter.getInstance( + meta, + addressIndexOut, + numBlocks, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + + TermsEnum iterator = values.termsEnum(); + BytesRefBuilder previous = new BytesRefBuilder(); + long offset = 0; + long ord = 0; + for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { + if ((ord & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == 0) { + writer.add(offset); + final int sortKeyLength; + if (ord == 0) { + // no previous term: no bytes to write + sortKeyLength = 0; + } else { + sortKeyLength = StringHelper.sortKeyLength(previous.get(), term); + } + offset += sortKeyLength; + data.writeBytes(term.bytes, term.offset, sortKeyLength); + } else if ((ord + & Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) == Lucene70DocValuesFormat.TERMS_DICT_REVERSE_INDEX_MASK) { + previous.copyBytes(term); + } + ++ord; + } + writer.add(offset); + writer.finish(); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + start = data.getFilePointer(); + addressBuffer.copyTo(data); + meta.writeLong(start); + meta.writeLong(data.getFilePointer() - start); + } + + @Override + public void addSortedNumericField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED_NUMERIC); + + long[] stats = writeValues(field, valuesProducer); + int numDocsWithField = Math.toIntExact(stats[0]); + long numValues = stats[1]; + assert numValues >= numDocsWithField; + + meta.writeInt(numDocsWithField); + if (numValues > numDocsWithField) { + long start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1L, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + SortedNumericDocValues values = valuesProducer.getSortedNumeric(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + } + } + + @Override + public void addSortedSetField(FieldInfo field, DocValuesProducer valuesProducer) throws IOException { + meta.writeInt(field.number); + meta.writeByte(Lucene70DocValuesFormat.SORTED_SET); + + SortedSetDocValues values = valuesProducer.getSortedSet(field); + int numDocsWithField = 0; + long numOrds = 0; + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + numDocsWithField++; 
+ numOrds += values.docValueCount(); + } + + if (numDocsWithField == numOrds) { + meta.writeByte((byte) 0); + doAddSortedField(field, new EmptyDocValuesProducer() { + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + return SortedSetSelector.wrap(valuesProducer.getSortedSet(field), SortedSetSelector.Type.MIN); + } + }); + return; + } + meta.writeByte((byte) 1); + + assert numDocsWithField != 0; + if (numDocsWithField == maxDoc) { + meta.writeLong(-1); + meta.writeLong(0L); + } else { + long offset = data.getFilePointer(); + meta.writeLong(offset); + values = valuesProducer.getSortedSet(field); + IndexedDISI.writeBitSet(values, data); + meta.writeLong(data.getFilePointer() - offset); + } + + int numberOfBitsPerOrd = LegacyDirectWriter.unsignedBitsRequired(values.getValueCount() - 1); + meta.writeByte((byte) numberOfBitsPerOrd); + long start = data.getFilePointer(); + meta.writeLong(start); + LegacyDirectWriter writer = LegacyDirectWriter.getInstance(data, numOrds, numberOfBitsPerOrd); + values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + for (int i = 0; i < values.docValueCount(); i++) { + writer.add(values.nextOrd()); + } + } + writer.finish(); + meta.writeLong(data.getFilePointer() - start); + + meta.writeInt(numDocsWithField); + start = data.getFilePointer(); + meta.writeLong(start); + meta.writeVInt(DIRECT_MONOTONIC_BLOCK_SHIFT); + + final LegacyDirectMonotonicWriter addressesWriter = LegacyDirectMonotonicWriter.getInstance( + meta, + data, + numDocsWithField + 1, + DIRECT_MONOTONIC_BLOCK_SHIFT + ); + long addr = 0; + addressesWriter.add(addr); + values = valuesProducer.getSortedSet(field); + for (int doc = values.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = values.nextDoc()) { + values.nextOrd(); + addr += values.docValueCount(); + addressesWriter.add(addr); + } + addressesWriter.finish(); + meta.writeLong(data.getFilePointer() - start); + + addTermsDict(values); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java new file mode 100644 index 000000000000..76fce4cd15c9 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormat.java @@ -0,0 +1,171 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectWriter; +import org.apache.lucene.codecs.DocValuesConsumer; +import org.apache.lucene.codecs.DocValuesFormat; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.SmallFloat; + +import java.io.IOException; + +/** + * Lucene 7.0 DocValues format. + * + *

+ * <p>Documents that have a value for the field are encoded in a way that it is always possible to
+ * know the ordinal of the current document in the set of documents that have a value. For instance,
+ * say the set of documents that have a value for the field is {1, 5, 6, 11}. When the
+ * iterator is on 6, it knows that this is the 3rd item of the set. This way, values
+ * can be stored densely and accessed based on their index at search time. If all documents in a
+ * segment have a value for the field, the index is the same as the doc ID, so this case is encoded
+ * implicitly and is very fast at query time. On the other hand if some documents are missing a
+ * value for the field then the set of documents that have a value is encoded into blocks. All doc
+ * IDs that share the same upper 16 bits are encoded into the same block with the following
+ * strategies:
+ *
+ * <ul>
+ *   <li>SPARSE: This strategy is used when a block contains at most 4095 documents. The lower 16
+ *       bits of doc IDs are stored as {@link DataOutput#writeShort(short) shorts} while the upper
+ *       16 bits are given by the block ID.
+ *   <li>DENSE: This strategy is used when a block contains between 4096 and 65535 documents. The
+ *       lower bits of doc IDs are stored in a bit set. Advancing is performed using {@link
+ *       Long#numberOfTrailingZeros(long) ntz} operations while the index is computed by
+ *       accumulating the {@link Long#bitCount(long) bit counts} of the visited longs.
+ *   <li>ALL: This strategy is used when a block contains exactly 65536 documents, meaning that the
+ *       block is full. In that case doc IDs do not need to be stored explicitly. This is typically
+ *       faster than both SPARSE and DENSE which is a reason why it is preferable to have all
+ *       documents that have a value for a field using contiguous doc IDs, for instance by using
+ *       {@link IndexWriterConfig#setIndexSort(org.apache.lucene.search.Sort) index sorting}.
+ * </ul>
+ *
+ * <p>Then the five per-document value types (Numeric,Binary,Sorted,SortedSet,SortedNumeric) are
+ * encoded using the following strategies:
+ *
+ * <p>{@link DocValuesType#NUMERIC NUMERIC}:
+ *
+ * <ul>
+ *   <li>Delta-compressed: per-document integers written as deltas from the minimum value,
+ *       compressed with bitpacking. For more information, see {@link LegacyDirectWriter}.
+ *   <li>Table-compressed: when the number of unique values is very small (< 256), and when there
+ *       are unused "gaps" in the range of values used (such as {@link SmallFloat}), a lookup table
+ *       is written instead. Each per-document entry is instead the ordinal to this table, and those
+ *       ordinals are compressed with bitpacking ({@link LegacyDirectWriter}).
+ *   <li>GCD-compressed: when all numbers share a common divisor, such as dates, the greatest common
+ *       denominator (GCD) is computed, and quotients are stored using Delta-compressed Numerics.
+ *   <li>Monotonic-compressed: when all numbers are monotonically increasing offsets, they are
+ *       written as blocks of bitpacked integers, encoding the deviation from the expected delta.
+ *   <li>Const-compressed: when there is only one possible value, no per-document data is needed and
+ *       this value is encoded alone.
+ * </ul>
+ *
+ * <p>{@link DocValuesType#BINARY BINARY}:
+ *
+ * <ul>
+ *   <li>Fixed-width Binary: one large concatenated byte[] is written, along with the fixed length.
+ *       Each document's value can be addressed directly with multiplication ({@code docID *
+ *       length}).
+ *   <li>Variable-width Binary: one large concatenated byte[] is written, along with end addresses
+ *       for each document. The addresses are written as Monotonic-compressed numerics.
+ *   <li>Prefix-compressed Binary: values are written in chunks of 16, with the first value written
+ *       completely and other values sharing prefixes. chunk addresses are written as
+ *       Monotonic-compressed numerics. A reverse lookup index is written from a portion of every
+ *       1024th term.
+ * </ul>
+ *
+ * <p>{@link DocValuesType#SORTED SORTED}:
+ *
+ * <ul>
+ *   <li>Sorted: a mapping of ordinals to deduplicated terms is written as Prefix-compressed Binary,
+ *       along with the per-document ordinals written using one of the numeric strategies above.
+ * </ul>
+ *
+ * <p>{@link DocValuesType#SORTED_SET SORTED_SET}:
+ *
+ * <ul>
+ *   <li>Single: if all documents have 0 or 1 value, then data are written like SORTED.
+ *   <li>SortedSet: a mapping of ordinals to deduplicated terms is written as Binary, an ordinal
+ *       list and per-document index into this list are written using the numeric strategies above.
+ * </ul>
+ *
+ * <p>{@link DocValuesType#SORTED_NUMERIC SORTED_NUMERIC}:
+ *
+ * <ul>
+ *   <li>Single: if all documents have 0 or 1 value, then data are written like NUMERIC.
+ *   <li>SortedNumeric: a value list and per-document index into this list are written using the
+ *       numeric strategies above.
+ * </ul>
+ *
+ * <p>Files:
+ *
+ * <ol>
+ *   <li>.dvd: DocValues data
+ *   <li>.dvm: DocValues metadata
+ * </ol>
    + */ +public final class Lucene70DocValuesFormat extends DocValuesFormat { + + /** Sole Constructor */ + public Lucene70DocValuesFormat() { + super("Lucene70"); + } + + @Override + public DocValuesConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + return new Lucene70DocValuesConsumer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + @Override + public DocValuesProducer fieldsProducer(SegmentReadState state) throws IOException { + return new Lucene70DocValuesProducer(state, DATA_CODEC, DATA_EXTENSION, META_CODEC, META_EXTENSION); + } + + static final String DATA_CODEC = "Lucene70DocValuesData"; + static final String DATA_EXTENSION = "dvd"; + static final String META_CODEC = "Lucene70DocValuesMetadata"; + static final String META_EXTENSION = "dvm"; + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + // indicates docvalues type + static final byte NUMERIC = 0; + static final byte BINARY = 1; + static final byte SORTED = 2; + static final byte SORTED_SET = 3; + static final byte SORTED_NUMERIC = 4; + + static final int DIRECT_MONOTONIC_BLOCK_SHIFT = 16; + + static final int NUMERIC_BLOCK_SHIFT = 14; + static final int NUMERIC_BLOCK_SIZE = 1 << NUMERIC_BLOCK_SHIFT; + + static final int TERMS_DICT_BLOCK_SHIFT = 4; + static final int TERMS_DICT_BLOCK_SIZE = 1 << TERMS_DICT_BLOCK_SHIFT; + static final int TERMS_DICT_BLOCK_MASK = TERMS_DICT_BLOCK_SIZE - 1; + + static final int TERMS_DICT_REVERSE_INDEX_SHIFT = 10; + static final int TERMS_DICT_REVERSE_INDEX_SIZE = 1 << TERMS_DICT_REVERSE_INDEX_SHIFT; + static final int TERMS_DICT_REVERSE_INDEX_MASK = TERMS_DICT_REVERSE_INDEX_SIZE - 1; +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java new file mode 100644 index 000000000000..5164a67c428b --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesProducer.java @@ -0,0 +1,1461 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2021 Elasticsearch B.V. 
+ */ +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import org.apache.lucene.backward_codecs.packed.LegacyDirectMonotonicReader; +import org.apache.lucene.backward_codecs.packed.LegacyDirectReader; +import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.DocValuesProducer; +import org.apache.lucene.index.BaseTermsEnum; +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.DocValues; +import org.apache.lucene.index.DocValuesSkipper; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.index.TermsEnum.SeekStatus; +import org.apache.lucene.store.ChecksumIndexInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.RandomAccessInput; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.LongValues; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +/** reader for {@link Lucene70DocValuesFormat} */ +final class Lucene70DocValuesProducer extends DocValuesProducer { + private final Map numerics = new HashMap<>(); + private final Map binaries = new HashMap<>(); + private final Map sorted = new HashMap<>(); + private final Map sortedSets = new HashMap<>(); + private final Map sortedNumerics = new HashMap<>(); + private final IndexInput data; + private final int maxDoc; + + static final long NO_MORE_ORDS = -1; + + /** expert: instantiates a new reader */ + Lucene70DocValuesProducer(SegmentReadState state, String dataCodec, String dataExtension, String metaCodec, String metaExtension) + throws IOException { + String metaName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension); + this.maxDoc = state.segmentInfo.maxDoc(); + + int version = -1; + + // read in the entries from the metadata file. 
+ try (ChecksumIndexInput in = EndiannessReverserUtil.openChecksumInput(state.directory, metaName, state.context)) { + Throwable priorE = null; + try { + version = CodecUtil.checkIndexHeader( + in, + metaCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + readFields(in, state.fieldInfos); + } catch (Throwable exception) { + priorE = exception; + } finally { + CodecUtil.checkFooter(in, priorE); + } + } + + String dataName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension); + this.data = EndiannessReverserUtil.openInput(state.directory, dataName, state.context); + boolean success = false; + try { + final int version2 = CodecUtil.checkIndexHeader( + data, + dataCodec, + Lucene70DocValuesFormat.VERSION_START, + Lucene70DocValuesFormat.VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + if (version != version2) { + throw new CorruptIndexException("Format versions mismatch: meta=" + version + ", data=" + version2, data); + } + + // NOTE: data file is too costly to verify checksum against all the bytes on open, + // but for now we at least verify proper structure of the checksum footer: which looks + // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption + // such as file truncation. + CodecUtil.retrieveChecksum(data); + + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(this.data); + } + } + } + + private void readFields(ChecksumIndexInput meta, FieldInfos infos) throws IOException { + for (int fieldNumber = meta.readInt(); fieldNumber != -1; fieldNumber = meta.readInt()) { + FieldInfo info = infos.fieldInfo(fieldNumber); + if (info == null) { + throw new CorruptIndexException("Invalid field number: " + fieldNumber, meta); + } + byte type = meta.readByte(); + if (type == Lucene70DocValuesFormat.NUMERIC) { + numerics.put(info.name, readNumeric(meta)); + } else if (type == Lucene70DocValuesFormat.BINARY) { + binaries.put(info.name, readBinary(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED) { + sorted.put(info.name, readSorted(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_SET) { + sortedSets.put(info.name, readSortedSet(meta)); + } else if (type == Lucene70DocValuesFormat.SORTED_NUMERIC) { + sortedNumerics.put(info.name, readSortedNumeric(meta)); + } else { + throw new CorruptIndexException("invalid type: " + type, meta); + } + } + } + + private NumericEntry readNumeric(ChecksumIndexInput meta) throws IOException { + NumericEntry entry = new NumericEntry(); + readNumeric(meta, entry); + return entry; + } + + private void readNumeric(ChecksumIndexInput meta, NumericEntry entry) throws IOException { + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numValues = meta.readLong(); + int tableSize = meta.readInt(); + if (tableSize > 256) { + throw new CorruptIndexException("invalid table size: " + tableSize, meta); + } + if (tableSize >= 0) { + entry.table = new long[tableSize]; + for (int i = 0; i < tableSize; ++i) { + entry.table[i] = meta.readLong(); + } + } + if (tableSize < -1) { + entry.blockShift = -2 - tableSize; + } else { + entry.blockShift = -1; + } + entry.bitsPerValue = meta.readByte(); + entry.minValue = meta.readLong(); + entry.gcd = meta.readLong(); + entry.valuesOffset = meta.readLong(); + entry.valuesLength = meta.readLong(); + } + + private BinaryEntry 
readBinary(ChecksumIndexInput meta) throws IOException { + BinaryEntry entry = new BinaryEntry(); + entry.dataOffset = meta.readLong(); + entry.dataLength = meta.readLong(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.minLength = meta.readInt(); + entry.maxLength = meta.readInt(); + if (entry.minLength < entry.maxLength) { + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1L, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + private SortedEntry readSorted(ChecksumIndexInput meta) throws IOException { + SortedEntry entry = new SortedEntry(); + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private SortedSetEntry readSortedSet(ChecksumIndexInput meta) throws IOException { + SortedSetEntry entry = new SortedSetEntry(); + byte multiValued = meta.readByte(); + switch (multiValued) { + case 0: // singlevalued + entry.singleValueEntry = readSorted(meta); + return entry; + case 1: // multivalued + break; + default: + throw new CorruptIndexException("Invalid multiValued flag: " + multiValued, meta); + } + entry.docsWithFieldOffset = meta.readLong(); + entry.docsWithFieldLength = meta.readLong(); + entry.bitsPerValue = meta.readByte(); + entry.ordsOffset = meta.readLong(); + entry.ordsLength = meta.readLong(); + entry.numDocsWithField = meta.readInt(); + entry.addressesOffset = meta.readLong(); + final int blockShift = meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + readTermDict(meta, entry); + return entry; + } + + private static void readTermDict(ChecksumIndexInput meta, TermsDictEntry entry) throws IOException { + entry.termsDictSize = meta.readVLong(); + entry.termsDictBlockShift = meta.readInt(); + final int blockShift = meta.readInt(); + final long addressesSize = (entry.termsDictSize + (1L << entry.termsDictBlockShift) - 1) >>> entry.termsDictBlockShift; + entry.termsAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, addressesSize, blockShift); + entry.maxTermLength = meta.readInt(); + entry.termsDataOffset = meta.readLong(); + entry.termsDataLength = meta.readLong(); + entry.termsAddressesOffset = meta.readLong(); + entry.termsAddressesLength = meta.readLong(); + entry.termsDictIndexShift = meta.readInt(); + final long indexSize = (entry.termsDictSize + (1L << entry.termsDictIndexShift) - 1) >>> entry.termsDictIndexShift; + entry.termsIndexAddressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, 1 + indexSize, blockShift); + entry.termsIndexOffset = meta.readLong(); + entry.termsIndexLength = meta.readLong(); + entry.termsIndexAddressesOffset = meta.readLong(); + entry.termsIndexAddressesLength = meta.readLong(); + } + + private SortedNumericEntry readSortedNumeric(ChecksumIndexInput meta) throws IOException { + SortedNumericEntry entry = new SortedNumericEntry(); + readNumeric(meta, entry); + entry.numDocsWithField = meta.readInt(); + if (entry.numDocsWithField != entry.numValues) { + entry.addressesOffset = meta.readLong(); + final int blockShift = 
meta.readVInt(); + entry.addressesMeta = LegacyDirectMonotonicReader.loadMeta(meta, entry.numDocsWithField + 1, blockShift); + entry.addressesLength = meta.readLong(); + } + return entry; + } + + @Override + public void close() throws IOException { + data.close(); + } + + private static class NumericEntry { + long[] table; + int blockShift; + byte bitsPerValue; + long docsWithFieldOffset; + long docsWithFieldLength; + long numValues; + long minValue; + long gcd; + long valuesOffset; + long valuesLength; + } + + private static class BinaryEntry { + long dataOffset; + long dataLength; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + int minLength; + int maxLength; + long addressesOffset; + long addressesLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + } + + private static class TermsDictEntry { + long termsDictSize; + int termsDictBlockShift; + LegacyDirectMonotonicReader.Meta termsAddressesMeta; + int maxTermLength; + long termsDataOffset; + long termsDataLength; + long termsAddressesOffset; + long termsAddressesLength; + int termsDictIndexShift; + LegacyDirectMonotonicReader.Meta termsIndexAddressesMeta; + long termsIndexOffset; + long termsIndexLength; + long termsIndexAddressesOffset; + long termsIndexAddressesLength; + } + + private static class SortedEntry extends TermsDictEntry { + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + } + + private static class SortedSetEntry extends TermsDictEntry { + SortedEntry singleValueEntry; + long docsWithFieldOffset; + long docsWithFieldLength; + int numDocsWithField; + byte bitsPerValue; + long ordsOffset; + long ordsLength; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + private static class SortedNumericEntry extends NumericEntry { + int numDocsWithField; + LegacyDirectMonotonicReader.Meta addressesMeta; + long addressesOffset; + long addressesLength; + } + + @Override + public NumericDocValues getNumeric(FieldInfo field) throws IOException { + NumericEntry entry = numerics.get(field.name); + return getNumeric(entry); + } + + private abstract static class DenseNumericDocValues extends NumericDocValues { + + final int maxDoc; + int doc = -1; + + DenseNumericDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int docID() { + return doc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public long cost() { + return maxDoc; + } + } + + private abstract static class SparseNumericDocValues extends NumericDocValues { + + final IndexedDISI disi; + + SparseNumericDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + } + + private NumericDocValues getNumeric(NumericEntry entry) throws IOException { + if 
(entry.docsWithFieldOffset == -2) { + // empty + return DocValues.emptyNumeric(); + } else if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.bitsPerValue == 0) { + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // dense but split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new DenseNumericDocValues(maxDoc) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long longValue() throws IOException { + final int block = doc >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(doc & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(doc)]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new DenseNumericDocValues(maxDoc) { + @Override + public long longValue() throws IOException { + return mul * values.get(doc) + delta; + } + }; + } + } + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numValues); + if (entry.bitsPerValue == 0) { + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + // sparse and split into blocks of different bits per value + final int shift = entry.blockShift; + final long mul = entry.gcd; + final int mask = (1 << shift) - 1; + return new SparseNumericDocValues(disi) { + int block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long longValue() throws IOException { + final int index = disi.index(); + final int block = index >>> shift; + if (this.block != block) { + int bitsPerValue; + do { + offset = blockEndOffset; + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 + ? 
LongValues.ZEROES + : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return table[(int) values.get(disi.index())]; + } + }; + } else { + final long mul = entry.gcd; + final long delta = entry.minValue; + return new SparseNumericDocValues(disi) { + @Override + public long longValue() throws IOException { + return mul * values.get(disi.index()) + delta; + } + }; + } + } + } + } + } + + private LongValues getNumericValues(NumericEntry entry) throws IOException { + if (entry.bitsPerValue == 0) { + return new LongValues() { + @Override + public long get(long index) { + return entry.minValue; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.valuesOffset, entry.valuesLength); + if (entry.blockShift >= 0) { + final int shift = entry.blockShift; + final long mul = entry.gcd; + final long mask = (1L << shift) - 1; + return new LongValues() { + long block = -1; + long delta; + long offset; + long blockEndOffset; + LongValues values; + + @Override + public long get(long index) { + final long block = index >>> shift; + if (this.block != block) { + assert block > this.block : "Reading backwards is illegal: " + this.block + " < " + block; + int bitsPerValue; + do { + offset = blockEndOffset; + try { + bitsPerValue = slice.readByte(offset++); + delta = slice.readLong(offset); + offset += Long.BYTES; + if (bitsPerValue == 0) { + blockEndOffset = offset; + } else { + final int length = slice.readInt(offset); + offset += Integer.BYTES; + blockEndOffset = offset + length; + } + } catch (IOException e) { + throw new RuntimeException(e); + } + this.block++; + } while (this.block != block); + values = bitsPerValue == 0 ? 
LongValues.ZEROES : LegacyDirectReader.getInstance(slice, bitsPerValue, offset); + } + return mul * values.get(index & mask) + delta; + } + }; + } else { + final LongValues values = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + if (entry.table != null) { + final long[] table = entry.table; + return new LongValues() { + @Override + public long get(long index) { + return table[(int) values.get(index)]; + } + }; + } else if (entry.gcd != 1) { + final long gcd = entry.gcd; + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) * gcd + minValue; + } + }; + } else if (entry.minValue != 0) { + final long minValue = entry.minValue; + return new LongValues() { + @Override + public long get(long index) { + return values.get(index) + minValue; + } + }; + } else { + return values; + } + } + } + } + + private abstract static class DenseBinaryDocValues extends BinaryDocValues { + + final int maxDoc; + int doc = -1; + + DenseBinaryDocValues(int maxDoc) { + this.maxDoc = maxDoc; + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + doc = target; + return true; + } + } + + private abstract static class SparseBinaryDocValues extends BinaryDocValues { + + final IndexedDISI disi; + + SparseBinaryDocValues(IndexedDISI disi) { + this.disi = disi; + } + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + } + + @Override + public BinaryDocValues getBinary(FieldInfo field) throws IOException { + BinaryEntry entry = binaries.get(field.name); + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptyBinary(); + } + + final IndexInput bytesSlice = data.slice("fixed-binary", entry.dataOffset, entry.dataLength); + + if (entry.docsWithFieldOffset == -1) { + // dense + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) doc * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new DenseBinaryDocValues(maxDoc) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + long startOffset = addresses.get(doc); + bytes.length = (int) (addresses.get(doc + 1L) - startOffset); + bytesSlice.seek(startOffset); + 
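// variable-length case: addresses.get(doc) .. addresses.get(doc + 1) bound this document's bytes, which are copied into the reusable BytesRef below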
bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + if (entry.minLength == entry.maxLength) { + // fixed length + final int length = entry.maxLength; + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[length], 0, length); + + @Override + public BytesRef binaryValue() throws IOException { + bytesSlice.seek((long) disi.index() * length); + bytesSlice.readBytes(bytes.bytes, 0, length); + return bytes; + } + }; + } else { + // variable length + final RandomAccessInput addressesData = this.data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesData); + return new SparseBinaryDocValues(disi) { + final BytesRef bytes = new BytesRef(new byte[entry.maxLength], 0, entry.maxLength); + + @Override + public BytesRef binaryValue() throws IOException { + final int index = disi.index(); + long startOffset = addresses.get(index); + bytes.length = (int) (addresses.get(index + 1L) - startOffset); + bytesSlice.seek(startOffset); + bytesSlice.readBytes(bytes.bytes, 0, bytes.length); + return bytes; + } + }; + } + } + } + + @Override + public SortedDocValues getSorted(FieldInfo field) throws IOException { + SortedEntry entry = sorted.get(field.name); + return getSorted(entry); + } + + private SortedDocValues getSorted(SortedEntry entry) throws IOException { + if (entry.docsWithFieldOffset == -2) { + return DocValues.emptySorted(); + } + + final LongValues ords; + if (entry.bitsPerValue == 0) { + ords = new LongValues() { + @Override + public long get(long index) { + return 0L; + } + }; + } else { + final RandomAccessInput slice = data.randomAccessSlice(entry.ordsOffset, entry.ordsLength); + ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + } + + if (entry.docsWithFieldOffset == -1) { + // dense + return new BaseSortedDocValues(entry, data) { + + int doc = -1; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + return doc = target; + } + + @Override + public boolean advanceExact(int target) { + doc = target; + return true; + } + + @Override + public int ordValue() { + return (int) ords.get(doc); + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedDocValues(entry, data) { + + @Override + public int nextDoc() throws IOException { + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return disi.advanceExact(target); + } + + @Override + public int ordValue() { + return (int) ords.get(disi.index()); + } + }; + } + } + + private abstract static class BaseSortedDocValues extends SortedDocValues { + + final SortedEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + 
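// the constructor below eagerly builds one private TermsDict enum that lookupOrd/lookupTerm reuse, while termsEnum() hands out a fresh instance per caller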
BaseSortedDocValues(SortedEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public int getValueCount() { + return Math.toIntExact(entry.termsDictSize); + } + + @Override + public BytesRef lookupOrd(int ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public int lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return Math.toIntExact(termsEnum.ord()); + case NOT_FOUND: + case END: + default: + return Math.toIntExact(-1L - termsEnum.ord()); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private abstract static class BaseSortedSetDocValues extends SortedSetDocValues { + + final SortedSetEntry entry; + final IndexInput data; + final TermsEnum termsEnum; + + BaseSortedSetDocValues(SortedSetEntry entry, IndexInput data) throws IOException { + this.entry = entry; + this.data = data; + this.termsEnum = termsEnum(); + } + + @Override + public long getValueCount() { + return entry.termsDictSize; + } + + @Override + public BytesRef lookupOrd(long ord) throws IOException { + termsEnum.seekExact(ord); + return termsEnum.term(); + } + + @Override + public long lookupTerm(BytesRef key) throws IOException { + SeekStatus status = termsEnum.seekCeil(key); + switch (status) { + case FOUND: + return termsEnum.ord(); + case NOT_FOUND: + case END: + default: + return -1L - termsEnum.ord(); + } + } + + @Override + public TermsEnum termsEnum() throws IOException { + return new TermsDict(entry, data); + } + } + + private static class TermsDict extends BaseTermsEnum { + + final TermsDictEntry entry; + final LongValues blockAddresses; + final IndexInput bytes; + final long blockMask; + final LongValues indexAddresses; + final IndexInput indexBytes; + final BytesRef term; + long ord = -1; + + TermsDict(TermsDictEntry entry, IndexInput data) throws IOException { + this.entry = entry; + RandomAccessInput addressesSlice = data.randomAccessSlice(entry.termsAddressesOffset, entry.termsAddressesLength); + blockAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsAddressesMeta, addressesSlice); + bytes = data.slice("terms", entry.termsDataOffset, entry.termsDataLength); + blockMask = (1L << entry.termsDictBlockShift) - 1; + RandomAccessInput indexAddressesSlice = data.randomAccessSlice( + entry.termsIndexAddressesOffset, + entry.termsIndexAddressesLength + ); + indexAddresses = LegacyDirectMonotonicReader.getInstance(entry.termsIndexAddressesMeta, indexAddressesSlice); + indexBytes = data.slice("terms-index", entry.termsIndexOffset, entry.termsIndexLength); + term = new BytesRef(entry.maxTermLength); + } + + @Override + public BytesRef next() throws IOException { + if (++ord >= entry.termsDictSize) { + return null; + } + if ((ord & blockMask) == 0L) { + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + } else { + final int token = Byte.toUnsignedInt(bytes.readByte()); + int prefixLength = token & 0x0F; + int suffixLength = 1 + (token >>> 4); + if (prefixLength == 15) { + prefixLength += bytes.readVInt(); + } + if (suffixLength == 16) { + suffixLength += bytes.readVInt(); + } + term.length = prefixLength + suffixLength; + bytes.readBytes(term.bytes, prefixLength, suffixLength); + } + return term; + } + + @Override + public void seekExact(long ord) throws IOException { + if (ord < 0 || ord 
>= entry.termsDictSize) { + throw new IndexOutOfBoundsException(); + } + final long blockIndex = ord >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(blockIndex); + bytes.seek(blockAddress); + this.ord = (blockIndex << entry.termsDictBlockShift) - 1; + do { + next(); + } while (this.ord < ord); + } + + private BytesRef getTermFromIndex(long index) throws IOException { + assert index >= 0 && index <= (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + final long start = indexAddresses.get(index); + term.length = (int) (indexAddresses.get(index + 1) - start); + indexBytes.seek(start); + indexBytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekTermsIndex(BytesRef text) throws IOException { + long lo = 0L; + long hi = (entry.termsDictSize - 1) >>> entry.termsDictIndexShift; + while (lo <= hi) { + final long mid = (lo + hi) >>> 1; + getTermFromIndex(mid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + lo = mid + 1; + } else { + hi = mid - 1; + } + } + + assert hi < 0 || getTermFromIndex(hi).compareTo(text) <= 0; + assert hi == ((entry.termsDictSize - 1) >>> entry.termsDictIndexShift) || getTermFromIndex(hi + 1).compareTo(text) > 0; + + return hi; + } + + private BytesRef getFirstTermFromBlock(long block) throws IOException { + assert block >= 0 && block <= (entry.termsDictSize - 1) >>> entry.termsDictBlockShift; + final long blockAddress = blockAddresses.get(block); + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + return term; + } + + private long seekBlock(BytesRef text) throws IOException { + long index = seekTermsIndex(text); + if (index == -1L) { + return -1L; + } + + long ordLo = index << entry.termsDictIndexShift; + long ordHi = Math.min(entry.termsDictSize, ordLo + (1L << entry.termsDictIndexShift)) - 1L; + + long blockLo = ordLo >>> entry.termsDictBlockShift; + long blockHi = ordHi >>> entry.termsDictBlockShift; + + while (blockLo <= blockHi) { + final long blockMid = (blockLo + blockHi) >>> 1; + getFirstTermFromBlock(blockMid); + final int cmp = term.compareTo(text); + if (cmp <= 0) { + blockLo = blockMid + 1; + } else { + blockHi = blockMid - 1; + } + } + + assert blockHi < 0 || getFirstTermFromBlock(blockHi).compareTo(text) <= 0; + assert blockHi == ((entry.termsDictSize - 1) >>> entry.termsDictBlockShift) + || getFirstTermFromBlock(blockHi + 1).compareTo(text) > 0; + + return blockHi; + } + + @Override + public SeekStatus seekCeil(BytesRef text) throws IOException { + final long block = seekBlock(text); + if (block == -1) { + // before the first term + seekExact(0L); + return SeekStatus.NOT_FOUND; + } + final long blockAddress = blockAddresses.get(block); + this.ord = block << entry.termsDictBlockShift; + bytes.seek(blockAddress); + term.length = bytes.readVInt(); + bytes.readBytes(term.bytes, 0, term.length); + while (true) { + int cmp = term.compareTo(text); + if (cmp == 0) { + return SeekStatus.FOUND; + } else if (cmp > 0) { + return SeekStatus.NOT_FOUND; + } + if (next() == null) { + return SeekStatus.END; + } + } + } + + @Override + public BytesRef term() throws IOException { + return term; + } + + @Override + public long ord() throws IOException { + return ord; + } + + @Override + public long totalTermFreq() throws IOException { + return -1L; + } + + @Override + public PostingsEnum postings(PostingsEnum reuse, int flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public ImpactsEnum impacts(int 
flags) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int docFreq() throws IOException { + throw new UnsupportedOperationException(); + } + } + + @Override + public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { + SortedNumericEntry entry = sortedNumerics.get(field.name); + if (entry.numValues == entry.numDocsWithField) { + return DocValues.singleton(getNumeric(entry)); + } + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + final LongValues values = getNumericValues(entry); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new SortedNumericDocValues() { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextValue() throws IOException { + return values.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new SortedNumericDocValues() { + + boolean set; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + @Override + public long nextValue() throws IOException { + set(); + return values.get(start++); + } + + @Override + public int docValueCount() { + set(); + return count; + } + + private void set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + } + } + }; + } + } + + @Override + public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { + SortedSetEntry entry = sortedSets.get(field.name); + if (entry.singleValueEntry != null) { + return DocValues.singleton(getSorted(entry.singleValueEntry)); + } + + final RandomAccessInput slice = data.randomAccessSlice(entry.ordsOffset, entry.ordsLength); + final LongValues ords = LegacyDirectReader.getInstance(slice, entry.bitsPerValue); + + final RandomAccessInput addressesInput = data.randomAccessSlice(entry.addressesOffset, entry.addressesLength); + final LongValues addresses = LegacyDirectMonotonicReader.getInstance(entry.addressesMeta, addressesInput); + + if (entry.docsWithFieldOffset == -1) { + // dense + return new 
BaseSortedSetDocValues(entry, data) { + + int doc = -1; + long start, end; + int count; + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int docID() { + return doc; + } + + @Override + public long cost() { + return maxDoc; + } + + @Override + public int advance(int target) throws IOException { + if (target >= maxDoc) { + return doc = NO_MORE_DOCS; + } + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + return doc = target; + } + + @Override + public boolean advanceExact(int target) throws IOException { + start = addresses.get(target); + end = addresses.get(target + 1L); + count = (int) (end - start); + doc = target; + return true; + } + + @Override + public long nextOrd() throws IOException { + if (start == end) { + return NO_MORE_ORDS; + } + return ords.get(start++); + } + + @Override + public int docValueCount() { + return count; + } + }; + } else { + // sparse + final IndexedDISI disi = new IndexedDISI(data, entry.docsWithFieldOffset, entry.docsWithFieldLength, entry.numDocsWithField); + return new BaseSortedSetDocValues(entry, data) { + + boolean set; + long start; + long end = 0; + int count; + + @Override + public int nextDoc() throws IOException { + set = false; + return disi.nextDoc(); + } + + @Override + public int docID() { + return disi.docID(); + } + + @Override + public long cost() { + return disi.cost(); + } + + @Override + public int advance(int target) throws IOException { + set = false; + return disi.advance(target); + } + + @Override + public boolean advanceExact(int target) throws IOException { + set = false; + return disi.advanceExact(target); + } + + private boolean set() { + if (set == false) { + final int index = disi.index(); + start = addresses.get(index); + end = addresses.get(index + 1L); + count = (int) (end - start); + set = true; + return true; + } + return false; + } + + @Override + public long nextOrd() throws IOException { + if (set()) { + return ords.get(start++); + } else if (start == end) { + return NO_MORE_ORDS; + } else { + return ords.get(start++); + } + } + + @Override + public int docValueCount() { + set(); + return count; + } + }; + } + } + + @Override + public void checkIntegrity() throws IOException { + CodecUtil.checksumEntireFile(data); + } + + @Override + public DocValuesSkipper getSkipper(FieldInfo field) { + return null; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec index 6e5205d664f2..0215e9f7ca4a 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec +++ b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.Codec @@ -6,5 +6,6 @@ # org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.BWCLucene70Codec +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene62.Lucene62Codec org.elasticsearch.xpack.lucene.bwc.codecs.lucene60.Lucene60Codec diff --git a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat index 2d46b4bca3d0..8d24d86982da 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat +++ b/x-pack/plugin/old-lucene-versions/src/main/resources/META-INF/services/org.apache.lucene.codecs.DocValuesFormat @@ -14,3 +14,4 @@ # limitations under the License. org.elasticsearch.xpack.lucene.bwc.codecs.lucene54.Lucene54DocValuesFormat +org.elasticsearch.xpack.lucene.bwc.codecs.lucene70.Lucene70DocValuesFormat diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java index f62ab9fbc4fe..0b72b96b446d 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/OldCodecsAvailableTests.java @@ -18,7 +18,7 @@ public class OldCodecsAvailableTests extends ESTestCase { * to the next major Lucene version. */ @UpdateForV9(owner = UpdateForV9.Owner.SEARCH_FOUNDATIONS) - @AwaitsFix(bugUrl = "muted until we add bwc codecs as part of lucene 10 upgrade") + @AwaitsFix(bugUrl = "muted until we add bwc codecs to support 7.x indices in Elasticsearch 9.0") public void testLuceneBWCCodecsAvailable() { assertEquals("Add Lucene BWC codecs for Elasticsearch version 7", 8, Version.CURRENT.major); } diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java index 304f7b0c934f..59f5e5de1eff 100644 --- a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene50/BlockPostingsFormat3Tests.java @@ -48,7 +48,9 @@ import org.apache.lucene.tests.util.English; import org.apache.lucene.tests.util.TestUtil; import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CompiledAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.test.ESTestCase; @@ -187,7 +189,11 @@ public class BlockPostingsFormat3Tests extends ESTestCase { int numIntersections = atLeast(3); for (int i = 0; i < numIntersections; i++) { String re = AutomatonTestUtil.randomRegexp(random()); - CompiledAutomaton automaton = new CompiledAutomaton(new RegExp(re, RegExp.NONE).toAutomaton()); + Automaton determinized = Operations.determinize( + new RegExp(re, RegExp.NONE).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); + CompiledAutomaton automaton = new CompiledAutomaton(determinized); if (automaton.type == CompiledAutomaton.AUTOMATON_TYPE.NORMAL) { // TODO: test start term too TermsEnum leftIntersection = leftTerms.intersect(automaton, null); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java index c819dca3ec6f..1a2aca0d63bd 100644 --- 
a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene54/Lucene54DocValuesFormatTests.java @@ -10,12 +10,12 @@ package org.elasticsearch.xpack.lucene.bwc.codecs.lucene54; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.tests.index.BaseDocValuesFormatTestCase; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; import org.apache.lucene.tests.util.TestUtil; import org.elasticsearch.test.GraalVMThreadsFilter; @ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) -public class Lucene54DocValuesFormatTests extends BaseDocValuesFormatTestCase { +public class Lucene54DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene54DocValuesFormat()); diff --git a/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java new file mode 100644 index 000000000000..ce645feb854d --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/test/java/org/elasticsearch/xpack/lucene/bwc/codecs/lucene70/Lucene70DocValuesFormatTests.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc.codecs.lucene70; + +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.tests.index.LegacyBaseDocValuesFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.elasticsearch.test.GraalVMThreadsFilter; + +@ThreadLeakFilters(filters = { GraalVMThreadsFilter.class }) +public class Lucene70DocValuesFormatTests extends LegacyBaseDocValuesFormatTestCase { + + private final Codec codec = TestUtil.alwaysDocValuesFormat(new Lucene70DocValuesFormat()); + + @Override + protected Codec getCodec() { + return codec; + } +} diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java index 48673d200217..f447f67b4cdd 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStackTracesAction.java @@ -179,7 +179,7 @@ public class TransportGetStackTracesAction extends TransportAction { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); EventsIndex resampledIndex = mediumDownsampled.getResampledIndex(request.getSampleSize(), sampleCount); log.debug( "User requested [{}] samples, [{}] samples matched in [{}]. 
Picking [{}]", @@ -220,7 +220,7 @@ public class TransportGetStackTracesAction extends TransportAction { - long sampleCount = searchResponse.getHits().getTotalHits().value; + long sampleCount = searchResponse.getHits().getTotalHits().value(); int requestedSampleCount = request.getSampleSize(); // random sampler aggregation does not support sampling rates between 0.5 and 1.0 -> clamp to 1.0 if (sampleCount <= requestedSampleCount * 2L) { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java index 9dd46e778fb9..dbb4cf4dc685 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/action/TransportGetStatusAction.java @@ -180,7 +180,7 @@ public class TransportGetStatusAction extends TransportMasterNodeAction { - boolean hasData = searchResponse.getHits().getTotalHits().value > 0; + boolean hasData = searchResponse.getHits().getTotalHits().value() > 0; listener.onResponse( new GetStatusAction.Response(pluginEnabled, resourceManagementEnabled, resourcesCreated, anyPre891Data, hasData) ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java index 8eac03d36371..e4f5810ac89d 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/LikePattern.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -51,8 +50,7 @@ public class LikePattern extends AbstractStringPattern { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java index 528872ca9b4c..41ae97ec5e4f 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.util.automaton.Automaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import java.util.Objects; @@ -21,7 +22,10 @@ public class RLikePattern extends AbstractStringPattern { @Override public Automaton createAutomaton() { - return new RegExp(regexpPattern).toAutomaton(); + return Operations.determinize( + new 
RegExp(regexpPattern, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton(), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); } @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java index fd6bd177e4c6..6703f1aeacbb 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/WildcardPattern.java @@ -9,7 +9,6 @@ package org.elasticsearch.xpack.ql.expression.predicate.regex; import org.apache.lucene.index.Term; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -39,8 +38,7 @@ public class WildcardPattern extends AbstractStringPattern { @Override public Automaton createAutomaton() { - Automaton automaton = WildcardQuery.toAutomaton(new Term(null, wildcard)); - return MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + return WildcardQuery.toAutomaton(new Term(null, wildcard), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); } @Override diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java index b501967524a6..29c471296b5d 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankMultiShardIT.java @@ -218,7 +218,7 @@ public class RRFRankMultiShardIT extends ESIntegTestCase { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -355,7 +355,7 @@ public class RRFRankMultiShardIT extends ESIntegTestCase { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -483,7 +483,7 @@ public class RRFRankMultiShardIT extends ESIntegTestCase { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java index 7269d9c3e5e7..ed26aa50ffa6 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java +++ 
b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRankSingleShardIT.java @@ -217,7 +217,7 @@ public class RRFRankSingleShardIT extends ESSingleNodeTestCase { .addFetchField("text0") .setSize(19), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -356,7 +356,7 @@ public class RRFRankSingleShardIT extends ESSingleNodeTestCase { .setSize(11) .addAggregation(AggregationBuilders.terms("sums").field("int")), response -> { - assertEquals(101, response.getHits().getTotalHits().value); + assertEquals(101, response.getHits().getTotalHits().value()); assertEquals(11, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); @@ -486,7 +486,7 @@ public class RRFRankSingleShardIT extends ESSingleNodeTestCase { .addAggregation(AggregationBuilders.terms("sums").field("int")) .setStats("search"), response -> { - assertEquals(51, response.getHits().getTotalHits().value); + assertEquals(51, response.getHits().getTotalHits().value()); assertEquals(19, response.getHits().getHits().length); SearchHit hit = response.getHits().getAt(0); diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java index edd5e557aadf..c5978219d94d 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderIT.java @@ -198,8 +198,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, lessThanOrEqualTo(size)); for (int k = 0; k < Math.min(size, resp.getHits().getHits().length); k++) { assertThat(resp.getHits().getAt(k).getId(), equalTo(expectedDocIds.get(k + fDocs_to_fetch))); @@ -249,8 +249,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); @@ -308,8 +308,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, 
equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); @@ -366,8 +366,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(4)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_6")); @@ -441,8 +441,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_7")); assertThat(resp.getHits().getAt(1).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(2).getId(), equalTo("doc_6")); @@ -493,8 +493,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_2")); assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); @@ -564,8 +564,8 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(6L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(6L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getHits().length, equalTo(1)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat(resp.getHits().getAt(0).getExplanation().isMatch(), equalTo(true)); @@ -733,14 +733,14 @@ public class RRFRetrieverBuilderIT extends ESIntegTestCase { ); assertResponse( client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(4L)) + 
searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) ); assertThat(numAsyncCalls.get(), equalTo(2)); // check that we use the rewritten vector to build the explain query assertResponse( client().prepareSearch(INDEX).setSource(new SearchSourceBuilder().retriever(rrf).explain(true)), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(4L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(4L)) ); assertThat(numAsyncCalls.get(), equalTo(4)); } diff --git a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java index 69c61fe3bca1..b1358f11bf63 100644 --- a/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java +++ b/x-pack/plugin/rank-rrf/src/internalClusterTest/java/org/elasticsearch/xpack/rank/rrf/RRFRetrieverBuilderNestedDocsIT.java @@ -167,8 +167,8 @@ public class RRFRetrieverBuilderNestedDocsIT extends RRFRetrieverBuilderIT { ElasticsearchAssertions.assertResponse(req, resp -> { assertNull(resp.pointInTimeId()); assertNotNull(resp.getHits().getTotalHits()); - assertThat(resp.getHits().getTotalHits().value, equalTo(3L)); - assertThat(resp.getHits().getTotalHits().relation, equalTo(TotalHits.Relation.EQUAL_TO)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(3L)); + assertThat(resp.getHits().getTotalHits().relation(), equalTo(TotalHits.Relation.EQUAL_TO)); assertThat(resp.getHits().getAt(0).getId(), equalTo("doc_6")); assertThat((double) resp.getHits().getAt(0).getScore(), closeTo(0.1742, 1e-4)); assertThat( diff --git a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java index 2a17a4a1152c..8df4e3a8dbea 100644 --- a/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java +++ b/x-pack/plugin/search-business-rules/src/internalClusterTest/java/org/elasticsearch/xpack/searchbusinessrules/PinnedQueryBuilderIT.java @@ -120,7 +120,7 @@ public class PinnedQueryBuilderIT extends ESIntegTestCase { assertResponse( prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSize(size).setFrom(from).setSearchType(DFS_QUERY_THEN_FETCH), response -> { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, lessThanOrEqualTo((long) numRelevantDocs + pins.size())); // Check pins are sorted by increasing score, (unlike organic, there are no duplicate scores) @@ -193,7 +193,7 @@ public class PinnedQueryBuilderIT extends ESIntegTestCase { private void assertExhaustiveScoring(PinnedQueryBuilder pqb) { assertResponse(prepareSearch().setQuery(pqb).setTrackTotalHits(true).setSearchType(DFS_QUERY_THEN_FETCH), response -> { - long numHits = response.getHits().getTotalHits().value; + long numHits = response.getHits().getTotalHits().value(); assertThat(numHits, equalTo(2L)); }); } diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java 
b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java index d9e65c385c61..2370a3dee6d0 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreQuery.java @@ -79,12 +79,10 @@ public final class CappedScoreQuery extends Query { */ protected static class CappedBulkScorer extends BulkScorer { final BulkScorer bulkScorer; - final Weight weight; final float maxScore; - public CappedBulkScorer(BulkScorer bulkScorer, Weight weight, float maxScore) { + public CappedBulkScorer(BulkScorer bulkScorer, float maxScore) { this.bulkScorer = bulkScorer; - this.weight = weight; this.maxScore = maxScore; } @@ -125,15 +123,6 @@ public final class CappedScoreQuery extends Query { final Weight innerWeight = searcher.createWeight(query, scoreMode, boost); if (scoreMode.needsScores()) { return new CappedScoreWeight(this, innerWeight, maxScore) { - @Override - public BulkScorer bulkScorer(LeafReaderContext context) throws IOException { - final BulkScorer innerScorer = innerWeight.bulkScorer(context); - if (innerScorer == null) { - return null; - } - return new CappedBulkScorer(innerScorer, this, maxScore); - } - @Override public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException { ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); @@ -152,7 +141,13 @@ public final class CappedScoreQuery extends Query { return innerScorer; } } - return new CappedScorer(innerWeight, innerScorer, maxScore); + return new CappedScorer(innerScorer, maxScore); + } + + @Override + public BulkScorer bulkScorer() throws IOException { + final BulkScorer innerScorer = innerScorerSupplier.bulkScorer(); + return new CappedBulkScorer(innerScorer, maxScore); } @Override @@ -166,15 +161,6 @@ public final class CappedScoreQuery extends Query { public Matches matches(LeafReaderContext context, int doc) throws IOException { return innerWeight.matches(context, doc); } - - @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - ScorerSupplier scorerSupplier = scorerSupplier(context); - if (scorerSupplier == null) { - return null; - } - return scorerSupplier.get(Long.MAX_VALUE); - } }; } else { return innerWeight; diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java index 6ad3b9ce4ef8..ccc90e8f671a 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScoreWeight.java @@ -11,6 +11,7 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.Explanation; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; @@ -36,8 +37,22 @@ public abstract class CappedScoreWeight extends Weight { } @Override - public Scorer scorer(LeafReaderContext context) throws IOException { - return new CappedScorer(this, innerWeight.scorer(context), maxScore); + public ScorerSupplier 
scorerSupplier(LeafReaderContext context) throws IOException { + ScorerSupplier innerScorerSupplier = innerWeight.scorerSupplier(context); + if (innerScorerSupplier == null) { + return null; + } + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + return new CappedScorer(innerScorerSupplier.get(leadCost), maxScore); + } + + @Override + public long cost() { + return innerScorerSupplier.cost(); + } + }; } @Override diff --git a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java index 57b2b62b77f6..67813588ba3b 100644 --- a/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java +++ b/x-pack/plugin/search-business-rules/src/main/java/org/elasticsearch/xpack/searchbusinessrules/CappedScorer.java @@ -9,15 +9,14 @@ package org.elasticsearch.xpack.searchbusinessrules; import org.apache.lucene.search.FilterScorer; import org.apache.lucene.search.Scorer; -import org.apache.lucene.search.Weight; import java.io.IOException; public class CappedScorer extends FilterScorer { private final float maxScore; - public CappedScorer(Weight weight, Scorer delegate, float maxScore) { - super(delegate, weight); + public CappedScorer(Scorer delegate, float maxScore) { + super(delegate); this.maxScore = maxScore; } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java index faf41e7e655a..eab73fbe5ad0 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsCanMatchOnCoordinatorIntegTests.java @@ -305,7 +305,7 @@ public class SearchableSnapshotsCanMatchOnCoordinatorIntegTests extends BaseFroz assertThat(newSearchResponse.getSuccessfulShards(), equalTo(totalShards)); assertThat(newSearchResponse.getFailedShards(), equalTo(0)); assertThat(newSearchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo((long) numDocsWithinRange)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo((long) numDocsWithinRange)); }); // test with SearchShardsAPI @@ -655,7 +655,7 @@ public class SearchableSnapshotsCanMatchOnCoordinatorIntegTests extends BaseFroz assertThat(searchResponse.getFailedShards(), equalTo(indexOutsideSearchRangeShardCount)); assertThat(searchResponse.getSkippedShards(), equalTo(searchableSnapshotShardCount)); assertThat(searchResponse.getTotalShards(), equalTo(totalShards)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); } @@ -736,7 +736,7 @@ public class SearchableSnapshotsCanMatchOnCoordinatorIntegTests extends BaseFroz // a shard that's available in order to construct the search response assertThat(newSearchResponse.getSkippedShards(), equalTo(totalShards - 1)); assertThat(newSearchResponse.getTotalShards(), 
equalTo(totalShards)); - assertThat(newSearchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(newSearchResponse.getHits().getTotalHits().value(), equalTo(0L)); }); }); @@ -850,7 +850,7 @@ public class SearchableSnapshotsCanMatchOnCoordinatorIntegTests extends BaseFroz SearchResponse response = client().search(request).actionGet(); logger.info( "[TEST DEBUG INFO] Search hits: {} Successful shards: {}, failed shards: {}, skipped shards: {}, total shards: {}", - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), response.getSuccessfulShards(), response.getFailedShards(), response.getSkippedShards(), diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java index 7615723860cf..9888afdd1649 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRecoverFromSnapshotIntegTests.java @@ -79,7 +79,7 @@ public class SearchableSnapshotsRecoverFromSnapshotIntegTests extends BaseSearch ensureGreen(restoredIndexName); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mockLog.assertAllExpectationsMatched(); } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java index a3da932398fb..1e76477378da 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsRepositoryIntegTests.java @@ -66,7 +66,7 @@ public class SearchableSnapshotsRepositoryIntegTests extends BaseFrozenSearchabl Storage storage = randomFrom(Storage.values()); String restoredIndexName = (storage == Storage.FULL_COPY ? "fully-mounted-" : "partially-mounted-") + indexName + '-' + i; mountSnapshot(repositoryName, snapshotName, indexName, restoredIndexName, Settings.EMPTY, storage); - assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(restoredIndexName).setTrackTotalHits(true), totalHits.value()); mountedIndices[i] = restoredIndexName; } @@ -183,7 +183,7 @@ public class SearchableSnapshotsRepositoryIntegTests extends BaseFrozenSearchabl ? 
equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); final String mountedAgain = randomValueOtherThan(mounted, () -> randomAlphaOfLength(10).toLowerCase(Locale.ROOT)); final SnapshotRestoreException exception = expectThrows( @@ -208,7 +208,7 @@ public class SearchableSnapshotsRepositoryIntegTests extends BaseFrozenSearchabl ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mountedAgain).setTrackTotalHits(true), totalHits.value()); assertAcked(indicesAdmin().prepareDelete(mountedAgain)); assertAcked(indicesAdmin().prepareDelete(mounted)); @@ -240,7 +240,7 @@ public class SearchableSnapshotsRepositoryIntegTests extends BaseFrozenSearchabl ? equalTo(Boolean.toString(deleteSnapshot)) : nullValue() ); - assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value); + assertHitCount(prepareSearch(mounted).setTrackTotalHits(true), totalHits.value()); if (randomBoolean()) { assertAcked(indicesAdmin().prepareClose(mounted)); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java index 40b7e08936fa..7eaf5d8f060c 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests.java @@ -179,7 +179,7 @@ public class SearchableSnapshotsBlobStoreCacheMaintenanceIntegTests extends Base ) .setSize(0), res -> { - final long remainingEntriesInCache = res.getHits().getTotalHits().value; + final long remainingEntriesInCache = res.getHits().getTotalHits().value(); if (indicesToDelete.contains(mountedIndex)) { assertThat(remainingEntriesInCache, equalTo(0L)); } else if (snapshotId.equals(SNAPSHOT_SNAPSHOT_ID_SETTING.get(indexSettings))) { diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index a21e3e6beabc..21e67212f1f5 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -549,7 +549,7 @@ public class BlobStoreCacheMaintenanceService implements ClusterStateListener { try (listeners) { executeSearch(new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(true)), (searchResponse, refs) -> { assert total.get() == 0L; - total.set(searchResponse.getHits().getTotalHits().value); + total.set(searchResponse.getHits().getTotalHits().value()); handleSearchResponse(searchResponse, refs); }); 
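The CappedScoreQuery, CappedScoreWeight and CappedScorer hunks earlier in this patch all follow the same Lucene 10 migration: per-leaf Scorer and BulkScorer creation moves out of Weight#scorer(LeafReaderContext) and Weight#bulkScorer(LeafReaderContext) and into the ScorerSupplier returned by Weight#scorerSupplier(LeafReaderContext), and Scorer wrappers such as FilterScorer no longer take a Weight. The following is a condensed, hypothetical sketch of that wrapping pattern (WrappingWeight is an invented name, not code from this patch):

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.BulkScorer;
    import org.apache.lucene.search.Explanation;
    import org.apache.lucene.search.FilterScorer;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.ScorerSupplier;
    import org.apache.lucene.search.Weight;

    /** Hypothetical weight that delegates to an inner weight and wraps its scorers. */
    class WrappingWeight extends Weight {
        private final Weight inner;

        WrappingWeight(Weight inner) {
            super(inner.getQuery());
            this.inner = inner;
        }

        @Override
        public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
            ScorerSupplier innerSupplier = inner.scorerSupplier(context);
            if (innerSupplier == null) {
                return null;
            }
            return new ScorerSupplier() {
                @Override
                public Scorer get(long leadCost) throws IOException {
                    // Scorers are created lazily through the supplier; the wrapper
                    // no longer needs (or receives) a reference to the Weight.
                    return new FilterScorer(innerSupplier.get(leadCost)) {
                        @Override
                        public float getMaxScore(int upTo) throws IOException {
                            return in.getMaxScore(upTo);
                        }
                    };
                }

                @Override
                public BulkScorer bulkScorer() throws IOException {
                    // Bulk scoring is exposed on the supplier rather than on the Weight.
                    return innerSupplier.bulkScorer();
                }

                @Override
                public long cost() {
                    return innerSupplier.cost();
                }
            };
        }

        @Override
        public Explanation explain(LeafReaderContext context, int doc) throws IOException {
            return inner.explain(context, doc);
        }

        @Override
        public boolean isCacheable(LeafReaderContext ctx) {
            return inner.isCacheable(ctx);
        }
    }

A capping wrapper like CappedBulkScorer then decorates whatever the inner supplier's bulkScorer() returns, which is what the bulkScorer() override added in the CappedScoreQuery hunk does.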
} diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index da08c6b38819..a7fb5571995b 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -23,6 +23,7 @@ import org.apache.lucene.index.KeepOnlyLastCommitDeletionPolicy; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SerialMergeScheduler; +import org.apache.lucene.index.StoredFields; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; @@ -173,9 +174,10 @@ public class PersistentCache implements Closeable { final Bits liveDocs = leafReaderContext.reader().getLiveDocs(); final IntPredicate isLiveDoc = liveDocs == null ? i -> true : liveDocs::get; final DocIdSetIterator docIdSetIterator = scorer.iterator(); + StoredFields storedFields = leafReaderContext.reader().storedFields(); while (docIdSetIterator.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) { if (isLiveDoc.test(docIdSetIterator.docID())) { - final Document document = leafReaderContext.reader().document(docIdSetIterator.docID()); + final Document document = storedFields.document(docIdSetIterator.docID()); final String cacheFileId = getValue(document, CACHE_ID_FIELD); if (predicate.test(snapshotCacheDir.resolve(cacheFileId))) { long size = buildCacheFileRanges(document).stream().mapToLong(ByteRange::length).sum(); @@ -423,9 +425,10 @@ public class PersistentCache implements Closeable { for (LeafReaderContext leafReaderContext : indexReader.leaves()) { final LeafReader leafReader = leafReaderContext.reader(); final Bits liveDocs = leafReader.getLiveDocs(); + final StoredFields storedFields = leafReader.storedFields(); for (int i = 0; i < leafReader.maxDoc(); i++) { if (liveDocs == null || liveDocs.get(i)) { - final Document document = leafReader.document(i); + final Document document = storedFields.document(i); logger.trace("loading document [{}]", document); documents.put(getValue(document, CACHE_ID_FIELD), document); } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java index 81cf205c13dd..4711043fff28 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/store/input/CachedBlobContainerIndexInput.java @@ -10,6 +10,7 @@ package org.elasticsearch.xpack.searchablesnapshots.store.input; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.ReadAdvice; import org.elasticsearch.blobcache.BlobCacheUtils; import org.elasticsearch.blobcache.common.ByteRange; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshot.FileInfo; @@ -35,7 +36,7 @@ public class 
CachedBlobContainerIndexInput extends MetadataCachingIndexInput { * a complete part of the {@link #fileInfo} at once in the cache and should not be * used for anything else than what the {@link #prefetchPart(int, Supplier)} method does. */ - public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(); + public static final IOContext CACHE_WARMING_CONTEXT = new IOContext(IOContext.Context.DEFAULT, null, null, ReadAdvice.NORMAL); private static final Logger logger = LogManager.getLogger(CachedBlobContainerIndexInput.class); diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java index 41121453e41a..4ee2bf7e6563 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/AbstractSearchableSnapshotsTestCase.java @@ -348,8 +348,8 @@ public abstract class AbstractSearchableSnapshotsTestCase extends ESIndexInputTe * uses a different buffer size for them. */ public static IOContext randomIOContext() { - final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READ, IOContext.READONCE); - assert ioContext.context != IOContext.Context.MERGE; + final IOContext ioContext = randomFrom(IOContext.DEFAULT, IOContext.READONCE); + assert ioContext.context() != IOContext.Context.MERGE; return ioContext; } } diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java index c97d6cb4cab0..eab6f1a629f3 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/InMemoryNoOpCommitDirectoryTests.java @@ -179,7 +179,7 @@ public class InMemoryNoOpCommitDirectoryTests extends ESTestCase { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } try (IndexWriter indexWriter = new IndexWriter(inMemoryNoOpCommitDirectory, new IndexWriterConfig())) { @@ -226,7 +226,7 @@ public class InMemoryNoOpCommitDirectoryTests extends ESTestCase { final TopDocs topDocs = newSearcher(directoryReader).search(new MatchAllDocsQuery(), 1); assertThat(topDocs.totalHits, equalTo(new TotalHits(1L, TotalHits.Relation.EQUAL_TO))); assertThat(topDocs.scoreDocs.length, equalTo(1)); - assertThat(directoryReader.document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); + assertThat(directoryReader.storedFields().document(topDocs.scoreDocs[0].doc).getField("foo").stringValue(), equalTo("bar")); } assertEquals(1, DirectoryReader.listCommits(inMemoryNoOpCommitDirectory).size()); 
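The PersistentCache and InMemoryNoOpCommitDirectoryTests hunks above show another recurring Lucene 10 change: stored documents are read through a StoredFields view fetched once from the reader, rather than by calling document(int) on the reader directly. A minimal standalone sketch of the pattern follows; it is not part of this patch, and the index path and field name are illustrative only.

    import java.io.IOException;
    import java.nio.file.Paths;

    import org.apache.lucene.document.Document;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.StoredFields;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.ScoreDoc;
    import org.apache.lucene.search.TopDocs;
    import org.apache.lucene.store.FSDirectory;

    public class StoredFieldsExample {
        public static void main(String[] args) throws IOException {
            try (FSDirectory dir = FSDirectory.open(Paths.get("/tmp/example-index"));
                 DirectoryReader reader = DirectoryReader.open(dir)) {
                IndexSearcher searcher = new IndexSearcher(reader);
                TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);

                // Lucene 10 pattern: obtain the StoredFields view once per reader, then
                // load documents through it instead of IndexReader#document(int).
                StoredFields storedFields = reader.storedFields();
                for (ScoreDoc scoreDoc : topDocs.scoreDocs) {
                    Document document = storedFields.document(scoreDoc.doc);
                    System.out.println(document.get("foo")); // "foo" is a made-up field name
                }
            }
        }
    }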
diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java index e65c4a60f89d..98df96eca777 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/store/SearchableSnapshotDirectoryTests.java @@ -401,9 +401,9 @@ public class SearchableSnapshotDirectoryTests extends AbstractSearchableSnapshot false, // no prewarming in this test because we want to ensure that files are accessed on purpose (directory, snapshotDirectory) -> { for (String fileName : randomSubsetOf(Arrays.asList(snapshotDirectory.listAll()))) { - final long checksum; - try (IndexInput input = directory.openInput(fileName, Store.READONCE_CHECKSUM)) { - checksum = CodecUtil.checksumEntireFile(input); + final long expectedChecksum; + try (IndexInput input = directory.openInput(fileName, IOContext.READONCE)) { + expectedChecksum = CodecUtil.checksumEntireFile(input); } final long snapshotChecksum; @@ -418,9 +418,9 @@ public class SearchableSnapshotDirectoryTests extends AbstractSearchableSnapshot } assertThat( - "Expected checksum [" + checksum + "] but got [" + snapshotChecksum + ']', + "Expected checksum [" + expectedChecksum + "] but got [" + snapshotChecksum + ']', snapshotChecksum, - equalTo(checksum) + equalTo(expectedChecksum) ); assertThat( "File [" + fileName + "] should have been read from heap", diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java index 6ffa09dc1f26..6d9110b56486 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java @@ -224,7 +224,7 @@ public abstract class AbstractRemoteClusterSecurityDlsAndFlsRestIT extends Abstr assertOK(response); SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java index 767452e6fcae..4b994ce82d92 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java @@ -282,7 +282,7 @@ public class RemoteClusterSecurityCcrIT extends AbstractRemoteClusterSecurityTes searchResponse = SearchResponseUtils.parseSearchResponse(parser); } try { - 
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java index d5e77c169464..1602a097b1b0 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrMigrationIT.java @@ -362,7 +362,7 @@ public class RemoteClusterSecurityCcrMigrationIT extends AbstractRemoteClusterSe assertOK(response); final SearchResponse searchResponse = SearchResponseUtils.parseSearchResponse(responseAsParser(response)); try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(numberOfDocs)); assertThat( Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toUnmodifiableSet()), equalTo(Set.of(indices)) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java index 8b18359fb831..1345e275fab1 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityMutualTlsIT.java @@ -119,7 +119,7 @@ public class RemoteClusterSecurityMutualTlsIT extends AbstractRemoteClusterSecur responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java index 69331fa44811..4cbd1cab21af 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityRestIT.java @@ -419,7 +419,7 @@ public class RemoteClusterSecurityRestIT extends AbstractRemoteClusterSecurityTe responseAsParser(performRequestWithRemoteMetricUser(metricSearchRequest)) ); try { - assertThat(metricSearchResponse.getHits().getTotalHits().value, equalTo(4L)); + 
assertThat(metricSearchResponse.getHits().getTotalHits().value(), equalTo(4L)); assertThat( Arrays.stream(metricSearchResponse.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toSet()), containsInAnyOrder("shared-metrics") diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java index 505b82b39b96..53c622898476 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecuritySpecialUserIT.java @@ -230,7 +230,7 @@ public class RemoteClusterSecuritySpecialUserIT extends AbstractRemoteClusterSec Arrays.stream(searchResponse5.getHits().getHits()).map(SearchHit::getIndex).collect(Collectors.toList()), containsInAnyOrder(".security-7") ); - assertThat(searchResponse5.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(searchResponse5.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); } finally { searchResponse5.decRef(); } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java index 3871029b3b44..6fa3ef1b4ef6 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityTopologyRestIT.java @@ -134,7 +134,7 @@ public class RemoteClusterSecurityTopologyRestIT extends AbstractRemoteClusterSe final Request scrollRequest = new Request("GET", "/_search/scroll"); final String scrollId; try { - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics")); documentFieldValues.add(searchResponse.getHits().getHits()[0].getSourceAsMap().get("name")); scrollId = searchResponse.getScrollId(); @@ -153,7 +153,7 @@ public class RemoteClusterSecurityTopologyRestIT extends AbstractRemoteClusterSe responseAsParser(performRequestWithRemoteMetricUser(scrollRequest)) ); try { - assertThat(scrollResponse.getHits().getTotalHits().value, equalTo(6L)); + assertThat(scrollResponse.getHits().getTotalHits().value(), equalTo(6L)); assertThat( Arrays.stream(scrollResponse.getHits().getHits()).map(SearchHit::getIndex).toList(), contains("shared-metrics") diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java index b1a76a455981..9a1d653132d2 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DateMathExpressionIntegTests.java @@ 
-81,7 +81,7 @@ public class DateMathExpressionIntegTests extends SecurityIntegTestCase { assertResponse( client.prepareMultiSearch().add(client.prepareSearch(expression).setQuery(QueryBuilders.matchAllQuery()).request()), - multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)) + multiSearchResponse -> assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)) ); UpdateResponse updateResponse = client.prepareUpdate(expression, response.getId()) diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java index b0572b265a45..a5f827c2a4b5 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DlsFlsRequestCacheTests.java @@ -437,7 +437,7 @@ public class DlsFlsRequestCacheTests extends SecuritySingleNodeTestCase { var searchResponse = requestBuilder.get(); try { assertThat(searchResponse.getFailedShards(), equalTo(0)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) docIds.size())); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) docIds.size())); final SearchHit[] hits = searchResponse.getHits().getHits(); assertThat(Arrays.stream(hits).map(SearchHit::getId).collect(Collectors.toUnmodifiableSet()), equalTo(docIds)); if (fieldNames != null) { diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java index 1bf7d8934775..73aebd4fc7db 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityRandomTests.java @@ -116,7 +116,7 @@ public class DocumentLevelSecurityRandomTests extends SecurityIntegTestCase { client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user" + roleI, USERS_PASSWD))) .prepareSearch("test"), searchResponse1 -> assertResponse(prepareSearch("alias" + role), searchResponse2 -> { - assertThat(searchResponse1.getHits().getTotalHits().value, equalTo(searchResponse2.getHits().getTotalHits().value)); + assertThat(searchResponse1.getHits().getTotalHits().value(), equalTo(searchResponse2.getHits().getTotalHits().value())); for (int hitI = 0; hitI < searchResponse1.getHits().getHits().length; hitI++) { assertThat(searchResponse1.getHits().getAt(hitI).getId(), equalTo(searchResponse2.getHits().getAt(hitI).getId())); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index c0866fa7ea69..87ca7d279c70 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -474,13 +474,13 @@ public class 
DocumentLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -495,13 +495,13 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(2)); @@ -522,7 +522,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { ), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(2L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -531,7 +531,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { assertThat(response.getResponses()[0].getResponse().getHits().getAt(1).getSourceAsMap().get("id"), is(2)); assertFalse(response.getResponses()[1].isFailure()); - 
assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(2L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(2L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("id"), is(1)); @@ -898,7 +898,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .addFetchField("field1") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field1")); @@ -914,7 +914,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .addFetchField("field2") .setSize(10), response -> { - assertEquals(5, response.getHits().getTotalHits().value); + assertEquals(5, response.getHits().getTotalHits().value()); assertEquals(5, response.getHits().getHits().length); for (SearchHit hit : response.getHits().getHits()) { assertNotNull(hit.field("field2")); @@ -929,7 +929,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .setQuery(query) .setSize(10), response -> { - assertEquals(10, response.getHits().getTotalHits().value); + assertEquals(10, response.getHits().getTotalHits().value()); assertEquals(10, response.getHits().getHits().length); } ); @@ -1265,7 +1265,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .get(); do { assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1325,7 +1325,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase { .setQuery(termQuery("field1", "value1")) .get(); assertNoFailures(response); - assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getTotalHits().value(), is((long) numVisible)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java index 34eecd57b53d..01020a428c31 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityRandomTests.java @@ -208,7 +208,7 @@ public class FieldLevelSecurityRandomTests extends SecurityIntegTestCase { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field1", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), 
equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -231,7 +231,7 @@ public class FieldLevelSecurityRandomTests extends SecurityIntegTestCase { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field2", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); @@ -254,7 +254,7 @@ public class FieldLevelSecurityRandomTests extends SecurityIntegTestCase { prepareSearch("test").addSort("id", SortOrder.ASC) .setQuery(QueryBuilders.boolQuery().should(QueryBuilders.termQuery("field3", "value"))), expected -> { - assertThat(actual.getHits().getTotalHits().value, equalTo(expected.getHits().getTotalHits().value)); + assertThat(actual.getHits().getTotalHits().value(), equalTo(expected.getHits().getTotalHits().value())); assertThat(actual.getHits().getHits().length, equalTo(expected.getHits().getHits().length)); for (int i = 0; i < actual.getHits().getHits().length; i++) { assertThat(actual.getHits().getAt(i).getId(), equalTo(expected.getHits().getAt(i).getId())); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index bffa53b1f4da..66c8c0a5b1b5 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -956,10 +956,10 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); } @@ -975,10 +975,10 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - 
assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); } @@ -993,11 +993,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1013,9 +1013,9 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(0)); } ); @@ -1029,12 +1029,12 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); 
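The many test-only changes in this area are mechanical: TotalHits now exposes its count and relation through accessor methods (it is a record-style class in Lucene 10), so field reads like getTotalHits().value become getTotalHits().value(). A tiny illustrative snippet, with a made-up hit count:

    import org.apache.lucene.search.TotalHits;

    public class TotalHitsAccessorExample {
        public static void main(String[] args) {
            // Hypothetical total-hit count, as a search response might report it.
            TotalHits totalHits = new TotalHits(42, TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO);

            long count = totalHits.value();                      // was: totalHits.value
            TotalHits.Relation relation = totalHits.relation();  // was: totalHits.relation

            boolean exact = relation == TotalHits.Relation.EQUAL_TO;
            System.out.println(count + (exact ? "" : "+") + " hits");
        }
    }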
assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1051,12 +1051,12 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1073,12 +1073,12 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field3"), is("value3")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(3)); 
assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1095,11 +1095,11 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .add(prepareSearch("test2").setQuery(QueryBuilders.matchAllQuery())), response -> { assertFalse(response.getResponses()[0].isFailure()); - assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[0].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[0].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); - assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value, is(1L)); + assertThat(response.getResponses()[1].getResponse().getHits().getTotalHits().value(), is(1L)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().size(), is(2)); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); assertThat(response.getResponses()[1].getResponse().getHits().getAt(0).getSourceAsMap().get("field2"), is("value2")); @@ -1132,7 +1132,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .get(); do { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1191,7 +1191,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(constantScoreQuery(termQuery("field1", "value1"))) .setFetchSource(true), response -> { - assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(response.getHits().getHits().length, is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1281,7 +1281,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setSize(1) .setFetchSource(true) .get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); } else { user2SearchResponse.decRef(); @@ -1289,7 +1289,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { user2SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user2", USERS_PASSWD)) ).prepareSearchScroll(user2SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user2SearchResponse.getHits().getTotalHits().value, is((long) 0)); + assertThat(user2SearchResponse.getHits().getTotalHits().value(), is((long) 0)); assertThat(user2SearchResponse.getHits().getHits().length, is(0)); if (randomBoolean()) { // maybe 
reuse the scroll even if empty @@ -1309,7 +1309,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setSize(1) .setFetchSource(true) .get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); @@ -1319,7 +1319,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { user1SearchResponse = client().filterWithHeader( Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)) ).prepareSearchScroll(user1SearchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(10L)).get(); - assertThat(user1SearchResponse.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(user1SearchResponse.getHits().getTotalHits().value(), is((long) numDocs)); if (scrolledDocsUser1 < numDocs) { assertThat(user1SearchResponse.getHits().getHits().length, is(1)); assertThat(user1SearchResponse.getHits().getAt(0).getSourceAsMap().size(), is(1)); @@ -2042,7 +2042,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(hasChildQuery("child", termQuery("field1", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); @@ -2061,7 +2061,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase { .setQuery(hasChildQuery("child", termQuery("alias", "yellow"), ScoreMode.None)), searchResponse -> { assertHitCount(searchResponse, 1L); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("p1")); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java index d4375d15e6a6..7d99d5817bdc 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/KibanaUserRoleIntegTests.java @@ -103,20 +103,20 @@ public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase { indexRandom(true, prepareIndex(index).setSource(field, "bar")); assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), response -> { - final long hits = response.getHits().getTotalHits().value; + final long hits = response.getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); assertResponse( client().filterWithHeader( singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), - response2 -> assertEquals(response2.getHits().getTotalHits().value, hits) + response2 -> assertEquals(response2.getHits().getTotalHits().value(), hits) ); final long multiHits; MultiSearchResponse multiSearchResponse = 
client().prepareMultiSearch() .add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())) .get(); try { - multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value; + multiHits = multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(); assertThat(hits, greaterThan(0L)); } finally { multiSearchResponse.decRef(); @@ -125,7 +125,7 @@ public class KibanaUserRoleIntegTests extends NativeRealmIntegTestCase { singletonMap("Authorization", UsernamePasswordToken.basicAuthHeaderValue("kibana_user", USERS_PASSWD)) ).prepareMultiSearch().add(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery())).get(); try { - assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value, multiHits); + assertEquals(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits().value(), multiHits); } finally { multiSearchResponse.decRef(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index af54f71779f0..6f8ea0f103a5 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -312,7 +312,7 @@ public class MultipleIndicesPermissionsTests extends SecurityIntegTestCase { assertResponse( userAClient.prepareSearch("alias1").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L)) ); final ElasticsearchSecurityException e1 = expectThrows( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java index 78146e58e91e..e178f4bf3eb6 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/NativeRealmIntegTests.java @@ -343,7 +343,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { String token = basicAuthHeaderValue(username, new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); assertClusterHealthOnlyAuthorizesWhenAnonymousRoleActive(token); @@ -366,7 +366,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); preparePutUser("joe", "s3krit-password2", hasher, SecuritySettingsSource.TEST_ROLE).get(); @@ -382,7 +382,7 @@ public class 
NativeRealmIntegTests extends NativeRealmIntegTestCase { token = basicAuthHeaderValue("joe", new SecureString("s3krit-password2")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); } @@ -403,7 +403,7 @@ public class NativeRealmIntegTests extends NativeRealmIntegTestCase { String token = basicAuthHeaderValue("joe", new SecureString("s3krit-password")); assertResponse( client().filterWithHeader(Collections.singletonMap("Authorization", token)).prepareSearch("idx"), - searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value) + searchResp -> assertEquals(1L, searchResp.getHits().getTotalHits().value()) ); DeleteUserResponse response = new DeleteUserRequestBuilder(client()).username("joe").get(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index f34983f7f125..0acc281dd844 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -213,7 +213,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -231,7 +231,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -249,7 +249,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); @@ -267,7 +267,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertEquals(2, 
multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertFalse(multiSearchResponse.getResponses()[1].isFailure()); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); @@ -317,7 +317,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertNoSearchHits(multiSearchResponse.getResponses()[1].getResponse()); } @@ -336,7 +336,7 @@ public class ReadActionsTests extends SecurityIntegTestCase { assertEquals(2, multiSearchResponse.getResponses().length); assertFalse(multiSearchResponse.getResponses()[0].isFailure()); SearchResponse searchResponse = multiSearchResponse.getResponses()[0].getResponse(); - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); assertReturnedIndices(searchResponse, "test1", "test2", "test3"); assertTrue(multiSearchResponse.getResponses()[1].isFailure()); Exception exception = multiSearchResponse.getResponses()[1].getFailure(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java index eb7c5e5276c1..a4cadeb953e1 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SecurityScrollTests.java @@ -48,13 +48,13 @@ public class SecurityScrollTests extends SecurityIntegTestCase { indexRandom(true, docs); assertResponse(prepareSearch("foo").setScroll(TimeValue.timeValueSeconds(5L)).setQuery(matchAllQuery()).setSize(1), response -> { - assertEquals(numDocs, response.getHits().getTotalHits().value); + assertEquals(numDocs, response.getHits().getTotalHits().value()); assertEquals(1, response.getHits().getHits().length); if (randomBoolean()) { assertResponse( client().prepareSearchScroll(response.getScrollId()).setScroll(TimeValue.timeValueSeconds(5L)), response2 -> { - assertEquals(numDocs, response2.getHits().getTotalHits().value); + assertEquals(numDocs, response2.getHits().getTotalHits().value()); assertEquals(1, response2.getHits().getHits().length); } ); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java index 4b8fbfd41acd..437fb7635117 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileIntegTests.java @@ -856,7 +856,7 @@ public class ProfileIntegTests extends AbstractProfileIntegTestCase { final SuggestProfilesRequest suggestProfilesRequest = new SuggestProfilesRequest(dataKeys, name, 10, hint); final SuggestProfilesResponse suggestProfilesResponse = client().execute(SuggestProfilesAction.INSTANCE, suggestProfilesRequest) .actionGet(); - assertThat(suggestProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + assertThat(suggestProfilesResponse.getTotalHits().relation(), is(TotalHits.Relation.EQUAL_TO)); return suggestProfilesResponse.getProfileHits(); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index c4cf3127b897..03558e72fdca 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -2254,7 +2254,7 @@ public class ApiKeyService implements Closeable { TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No api keys found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryApiKeysResult.EMPTY); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java index d866bd2a9d22..74a9aa7291ba 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/NativeUsersStore.java @@ -179,7 +179,7 @@ public class NativeUsersStore { TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - final long total = searchResponse.getHits().getTotalHits().value; + final long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No users found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryUserResults.EMPTY); @@ -214,7 +214,7 @@ public class NativeUsersStore { .setSize(0) .setTrackTotalHits(true) .request(), - listener.safeMap(response -> response.getHits().getTotalHits().value), + listener.safeMap(response -> response.getHits().getTotalHits().value()), client::search ) ); @@ -706,7 +706,7 @@ public class NativeUsersStore { @Override public void onResponse(SearchResponse searchResponse) { Map userInfos = new HashMap<>(); - assert searchResponse.getHits().getTotalHits().value <= 10 + assert searchResponse.getHits().getTotalHits().value() <= 10 : "there are more than 10 reserved users we need to change this to retrieve them all!"; for (SearchHit searchHit : searchResponse.getHits().getHits()) { Map sourceMap = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 1c773a6e3963..fa6187798da2 100644 --- 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -85,6 +85,7 @@ import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeRes import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.NamedClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.Privilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import org.elasticsearch.xpack.core.sql.SqlAsyncActionNames; import org.elasticsearch.xpack.security.action.user.TransportChangePasswordAction; @@ -550,7 +551,7 @@ public class RBACEngine implements AuthorizationEngine { Automaton existingPermissions = permissionMap.computeIfAbsent(entry.getKey(), role::allowedActionsMatcher); for (String alias : entry.getValue()) { Automaton newNamePermissions = permissionMap.computeIfAbsent(alias, role::allowedActionsMatcher); - if (Operations.subsetOf(newNamePermissions, existingPermissions) == false) { + if (Automatons.subsetOf(newNamePermissions, existingPermissions) == false) { listener.onResponse(AuthorizationResult.deny()); return; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java index 40cb3ea4d986..8ff535f3f623 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/DeprecationRoleDescriptorConsumer.java @@ -10,7 +10,6 @@ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationCategory; @@ -21,6 +20,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.IndicesPrivileges; import org.elasticsearch.xpack.core.security.authz.privilege.IndexPrivilege; +import org.elasticsearch.xpack.core.security.support.Automatons; import org.elasticsearch.xpack.core.security.support.StringMatcher; import java.time.ZoneOffset; @@ -195,7 +195,7 @@ public final class DeprecationRoleDescriptorConsumer implements Consumer IndexPrivilege.get(indexPrivileges).getAutomaton() ); - if (false == Operations.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { + if (false == Automatons.subsetOf(indexPrivilegeAutomaton, aliasPrivilegeAutomaton)) { inferiorIndexNames.add(index.getName()); } } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java index 7c242fb07b68..4ae17a679d20 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/NativeRolesStore.java @@ -279,7 +279,7 @@ public class NativeRolesStore implements BiConsumer, ActionListener< TransportSearchAction.TYPE, searchRequest, ActionListener.wrap(searchResponse -> { - long total = searchResponse.getHits().getTotalHits().value; + long total = searchResponse.getHits().getTotalHits().value(); if (total == 0) { logger.debug("No roles found for query [{}]", searchRequest.source().query()); listener.onResponse(QueryRoleResult.EMPTY); @@ -731,28 +731,28 @@ public class NativeRolesStore implements BiConsumer, ActionListener< if (responses[0].isFailure()) { usageStats.put("size", 0); } else { - usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value); + usageStats.put("size", responses[0].getResponse().getHits().getTotalHits().value()); } if (responses[1].isFailure()) { usageStats.put("fls", false); } else { - usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("fls", responses[1].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[2].isFailure()) { usageStats.put("dls", false); } else { - usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value > 0L); + usageStats.put("dls", responses[2].getResponse().getHits().getTotalHits().value() > 0L); } if (responses[3].isFailure()) { usageStats.put("remote_indices", 0); } else { - usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_indices", responses[3].getResponse().getHits().getTotalHits().value()); } if (responses[4].isFailure()) { usageStats.put("remote_cluster", 0); } else { - usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value); + usageStats.put("remote_cluster", responses[4].getResponse().getHits().getTotalHits().value()); } delegate.onResponse(usageStats); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index b347ceb833f6..b347c278aae0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -414,19 +414,19 @@ public class ProfileService { logger.debug("error on counting total profiles", items[0].getFailure()); usage.put("total", 0L); } else { - usage.put("total", items[0].getResponse().getHits().getTotalHits().value); + usage.put("total", items[0].getResponse().getHits().getTotalHits().value()); } if (items[1].isFailure()) { logger.debug("error on counting enabled profiles", items[0].getFailure()); usage.put("enabled", 0L); } else { - usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value); + usage.put("enabled", items[1].getResponse().getHits().getTotalHits().value()); } if (items[2].isFailure()) { logger.debug("error on counting recent profiles", items[0].getFailure()); usage.put("recent", 0L); } else { - usage.put("recent", items[2].getResponse().getHits().getTotalHits().value); + usage.put("recent", items[2].getResponse().getHits().getTotalHits().value()); } listener.onResponse(usage); }, listener::onFailure) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java index 5ec76a8dc3d0..5cd8cba763d3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecurityMigrations.java @@ -77,8 +77,8 @@ public class SecurityMigrations { client.search(countRequest, ActionListener.wrap(response -> { // If there are no roles, skip migration - if (response.getHits().getTotalHits().value > 0) { - logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value + "] roles"); + if (response.getHits().getTotalHits().value() > 0) { + logger.info("Preparing to migrate [" + response.getHits().getTotalHits().value() + "] roles"); updateRolesByQuery(indexManager, client, filterQuery, listener); } else { listener.onResponse(null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java index 0a2c40d2a257..a4d9dacd1a63 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/FileRolesStoreTests.java @@ -8,7 +8,7 @@ package org.elasticsearch.xpack.security.authz.store; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.automaton.MinimizationOperations; +import org.apache.lucene.tests.util.automaton.AutomatonTestUtil; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -137,8 +137,8 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("idx3")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); - assertTrue(Operations.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.READ.getAutomaton(), group.privilege().getAutomaton())); + assertTrue(AutomatonTestUtil.subsetOf(IndexPrivilege.WRITE.getAutomaton(), group.privilege().getAutomaton())); descriptor = roles.get("role1.ab"); assertNotNull(descriptor); @@ -181,9 +181,9 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices()[0], equalTo("/.*_.*/")); assertThat(group.privilege(), notNullValue()); assertTrue( - Operations.sameLanguage( + AutomatonTestUtil.sameLanguage( group.privilege().getAutomaton(), - MinimizationOperations.minimize( + Operations.determinize( Operations.union(IndexPrivilege.READ.getAutomaton(), IndexPrivilege.WRITE.getAutomaton()), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT ) @@ -236,7 +236,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("field_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); 
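Note: the security hunks above show the other recurring move in this upgrade: Operations.subsetOf and Operations.sameLanguage are no longer available in lucene-core and MinimizationOperations is gone, so production code routes subset checks through the x-pack Automatons helper (RBACEngine, DeprecationRoleDescriptorConsumer) while tests use AutomatonTestUtil from the Lucene test framework, with plain determinization standing in for minimization. A sketch of how the test-side check reads on Lucene 10, mirroring the FileRolesStoreTests hunk above; the class and method names are illustrative:

import org.apache.lucene.tests.util.automaton.AutomatonTestUtil;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;

class PrivilegeAutomatonSketch {
    // True when a privilege automaton accepts exactly the union of the READ and WRITE languages.
    static boolean equalsReadPlusWrite(Automaton privilege, Automaton read, Automaton write) {
        // MinimizationOperations.minimize is gone; determinizing the union is sufficient here.
        Automaton expected = Operations.determinize(
            Operations.union(read, write),
            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
        );
        return AutomatonTestUtil.sameLanguage(privilege, expected);
    }
}
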
assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); @@ -258,7 +258,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertFalse(group.getFieldPermissions().hasFieldLevelSecurity()); assertThat(group.getQuery(), notNullValue()); @@ -279,7 +279,7 @@ public class FileRolesStoreTests extends ESTestCase { assertThat(group.indices().length, is(1)); assertThat(group.indices()[0], equalTo("query_fields_idx")); assertThat(group.privilege(), notNullValue()); - assertTrue(Operations.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); + assertTrue(AutomatonTestUtil.sameLanguage(group.privilege().getAutomaton(), IndexPrivilege.READ.getAutomaton())); assertTrue(group.getFieldPermissions().grantsAccessTo("foo")); assertTrue(group.getFieldPermissions().grantsAccessTo("boo")); assertTrue(group.getFieldPermissions().hasFieldLevelSecurity()); diff --git a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java index d42d45e43062..e5171a7c5165 100644 --- a/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java +++ b/x-pack/plugin/slm/src/internalClusterTest/java/org/elasticsearch/xpack/slm/SLMSnapshotBlockingIntegTests.java @@ -281,7 +281,7 @@ public class SLMSnapshotBlockingIntegTests extends AbstractSnapshotIntegTestCase completedSnapshotName, Strings.arrayToCommaDelimitedString(resp.getHits().getHits()) ); - assertThat(resp.getHits().getTotalHits().value, equalTo(2L)); + assertThat(resp.getHits().getTotalHits().value(), equalTo(2L)); } ); }); diff --git a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java index df8dc54bb749..405a9926e2e5 100644 --- a/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java +++ b/x-pack/plugin/snapshot-based-recoveries/src/internalClusterTest/java/org/elasticsearch/xpack/snapshotbasedrecoveries/recovery/SnapshotBasedIndexRecoveryIT.java @@ -1595,7 +1595,7 @@ public class SnapshotBasedIndexRecoveryIT extends AbstractSnapshotIntegTestCase int docIdToMatch = randomIntBetween(0, docCount - 1); assertResponse(searchRequestBuilder.setQuery(QueryBuilders.termQuery("field", docIdToMatch)), searchResponse -> { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = searchResponse.getHits().getAt(0); Map source = searchHit.getSourceAsMap(); assertThat(source, is(notNullValue())); @@ -1613,7 +1613,7 @@ public class SnapshotBasedIndexRecoveryIT 
extends AbstractSnapshotIntegTestCase private void assertSearchResponseContainsAllIndexedDocs(SearchResponse searchResponse, long docCount) { assertThat(searchResponse.getSuccessfulShards(), equalTo(1)); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docCount)); + assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docCount)); for (int i = 0; i < searchResponse.getHits().getHits().length; i++) { SearchHit searchHit = searchResponse.getHits().getAt(i); Map source = searchHit.getSourceAsMap(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java index 3c64d140e2b5..e7b9156d5fb6 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoGridAggAndQueryConsistencyIT.java @@ -129,7 +129,7 @@ public class GeoGridAggAndQueryConsistencyIT extends ESIntegTestCase { client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), innerResponse -> assertThat( "Bucket " + bucket.getKeyAsString(), - innerResponse.getHits().getTotalHits().value, + innerResponse.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); @@ -320,7 +320,7 @@ public class GeoGridAggAndQueryConsistencyIT extends ESIntegTestCase { client().prepareSearch("test").setTrackTotalHits(true).setQuery(queryBuilder), response -> assertThat( "Expected hits at precision " + precision + " for H3 cell " + bucket.getKeyAsString(), - response.getHits().getTotalHits().value, + response.getHits().getTotalHits().value(), Matchers.equalTo(bucket.getDocCount()) ) ); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java index b4a3a07502ab..b4d7a472591b 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/GeoShapeWithDocValuesIT.java @@ -175,7 +175,7 @@ public class GeoShapeWithDocValuesIT extends GeoShapeIntegTestCase { indexRandom(true, prepareIndex("test").setId("0").setSource(source, XContentType.JSON)); assertNoFailuresAndResponse(client().prepareSearch("test").setFetchSource(false).addStoredField("shape"), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); SearchHit searchHit = response.getHits().getAt(0); assertThat(searchHit.field("shape").getValue(), instanceOf(BytesRef.class)); BytesRef bytesRef = searchHit.field("shape").getValue(); diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java index 1c013aba5226..4f23b6de4c37 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java +++ 
b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryOverShapeTests.java @@ -247,7 +247,7 @@ public class ShapeQueryOverShapeTests extends ShapeQueryTestCase { assertResponse( client().prepareSearch(INDEX).setQuery(new ShapeQueryBuilder("alias", queryGeometry).relation(ShapeRelation.INTERSECTS)), response -> { - assertTrue(response.getHits().getTotalHits().value > 0); + assertTrue(response.getHits().getTotalHits().value() > 0); } ); } diff --git a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java index 1ac6bf3b6fd3..e26066cd89c5 100644 --- a/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java +++ b/x-pack/plugin/spatial/src/internalClusterTest/java/org/elasticsearch/xpack/spatial/search/ShapeQueryTestCase.java @@ -112,7 +112,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -123,7 +123,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { assertNoFailuresAndResponse( client().prepareSearch(defaultIndexName).setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -138,7 +138,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, circle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(2L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(2L)); assertThat(response.getHits().getHits().length, equalTo(2)); assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); @@ -154,7 +154,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { .setQuery(new ShapeQueryBuilder(defaultFieldName, polygon).relation(ShapeRelation.INTERSECTS)), response -> { SearchHits searchHits = response.getHits(); - assertThat(searchHits.getTotalHits().value, equalTo(2L)); + assertThat(searchHits.getTotalHits().value(), equalTo(2L)); assertThat(searchHits.getAt(0).getId(), anyOf(equalTo("1"), equalTo("4"))); assertThat(searchHits.getAt(1).getId(), anyOf(equalTo("1"), equalTo("4"))); } @@ -175,7 +175,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, 
mp).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(3L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(3L)); assertThat(response.getHits().getHits().length, equalTo(3)); assertThat(response.getHits().getAt(0).getId(), not(equalTo("3"))); assertThat(response.getHits().getAt(1).getId(), not(equalTo("3"))); @@ -191,7 +191,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { client().prepareSearch(defaultIndexName) .setQuery(new ShapeQueryBuilder(defaultFieldName, rectangle).relation(ShapeRelation.INTERSECTS)), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } @@ -232,7 +232,7 @@ public abstract class ShapeQueryTestCase extends ESSingleNodeTestCase { .indexedShapePath(indexedShapePath) ), response -> { - assertThat(response.getHits().getTotalHits().value, equalTo(1L)); + assertThat(response.getHits().getTotalHits().value(), equalTo(1L)); assertThat(response.getHits().getHits().length, equalTo(1)); assertThat(response.getHits().getAt(0).getId(), equalTo("2")); } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java index 66f5597be543..2713afc149e0 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/ingest/CircleProcessorTests.java @@ -230,8 +230,8 @@ public class CircleProcessorTests extends ESTestCase { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(pointOnDatelineQuery, 1).totalHits.value(), equalTo(1L)); } } } @@ -261,8 +261,8 @@ public class CircleProcessorTests extends ESTestCase { try (IndexReader reader = w.getReader()) { IndexSearcher searcher = newSearcher(reader); - assertThat(searcher.search(sameShapeQuery, 1).totalHits.value, equalTo(1L)); - assertThat(searcher.search(centerPointQuery, 1).totalHits.value, equalTo(1L)); + assertThat(searcher.search(sameShapeQuery, 1).totalHits.value(), equalTo(1L)); + assertThat(searcher.search(centerPointQuery, 1).totalHits.value(), equalTo(1L)); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index 06293df4f455..411a4cda868f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -223,7 +223,7 @@ public class Querier { } var totalHits = response.getHits().getTotalHits(); - var hits = totalHits != null ? "hits " + totalHits.relation + " " + totalHits.value + ", " : ""; + var hits = totalHits != null ? 
"hits " + totalHits.relation() + " " + totalHits.value() + ", " : ""; logger.trace( "Got search response [{}{} aggregations: [{}], {} failed shards, {} skipped shards, " + "{} successful shards, {} total shards, took {}, timed out [{}]]", @@ -548,7 +548,7 @@ public class Querier { List exts = new ArrayList<>(refs.size()); TotalHits totalHits = response.getHits().getTotalHits(); - ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value); + ConstantExtractor totalCount = new TotalHitsExtractor(totalHits == null ? -1L : totalHits.value()); for (QueryContainer.FieldInfo ref : refs) { exts.add(createExtractor(ref.extraction(), totalCount)); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java index 78976ea7e83c..cf52a5f5d712 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/extractor/TopHitsAggExtractor.java @@ -76,7 +76,7 @@ public class TopHitsAggExtractor implements BucketExtractor { throw new SqlIllegalArgumentException("Cannot find an aggregation named {}", name); } - if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value == 0) { + if (agg.getHits().getTotalHits() == null || agg.getHits().getTotalHits().value() == 0) { return null; } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java index 8ee23e38f9ff..0ba29fef8e06 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformUsageTransportAction.java @@ -123,7 +123,7 @@ public class TransformUsageTransportAction extends XPackUsageFeatureTransportAct Arrays.toString(transformCountSuccess.getShardFailures()) ); } - long totalTransforms = transformCountSuccess.getHits().getTotalHits().value; + long totalTransforms = transformCountSuccess.getHits().getTotalHits().value(); if (totalTransforms == 0) { var usage = new TransformFeatureSetUsage(transformsCountByState, Collections.emptyMap(), new TransformIndexerStats()); listener.onResponse(new XPackUsageFeatureResponse(usage)); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java index f49d5fc96f3a..cd06a4cadaa3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProvider.java @@ -84,7 +84,7 @@ class TimeBasedCheckpointProvider extends DefaultCheckpointProvider { client, TransportSearchAction.TYPE, searchRequest, - ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value > 0L), listener::onFailure) + ActionListener.wrap(r -> listener.onResponse(r.getHits().getTotalHits().value() > 0L), listener::onFailure) ); } diff --git 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java index ffc4b48f9cc3..9d5175922c89 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/persistence/IndexBasedTransformConfigManager.java @@ -509,7 +509,7 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager final ExpandedIdsMatcher requiredMatches = new ExpandedIdsMatcher(idTokens, allowNoMatch); executeAsyncWithOrigin(request, foundConfigsListener.delegateFailureAndWrap((l, searchResponse) -> { - long totalHits = searchResponse.getHits().getTotalHits().value; + long totalHits = searchResponse.getHits().getTotalHits().value(); // important: preserve order Set ids = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); Set configs = Sets.newLinkedHashSetWithExpectedSize(searchResponse.getHits().getHits().length); @@ -589,7 +589,7 @@ public class IndexBasedTransformConfigManager implements TransformConfigManager .trackTotalHitsUpTo(1) ); executeAsyncWithOrigin(TransportSearchAction.TYPE, searchRequest, deleteListener.delegateFailureAndWrap((l, searchResponse) -> { - if (searchResponse.getHits().getTotalHits().value == 0) { + if (searchResponse.getHits().getTotalHits().value() == 0) { listener.onFailure( new ResourceNotFoundException(TransformMessages.getMessage(TransformMessages.REST_UNKNOWN_TRANSFORM, transformId)) ); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java index 23bab56de5ec..2de810b2b902 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/common/AbstractCompositeAggFunction.java @@ -207,7 +207,7 @@ public abstract class AbstractCompositeAggFunction implements Function { @Override public void getInitialProgressFromResponse(SearchResponse response, ActionListener progressListener) { - progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value, 0L, 0L)); + progressListener.onResponse(new TransformProgress(response.getHits().getTotalHits().value(), 0L, 0L)); } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java index 684e3a085405..68b31d4f466b 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollector.java @@ -7,7 +7,7 @@ package org.elasticsearch.xpack.transform.transforms.pivot; -import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.Rounding; import 
org.elasticsearch.common.geo.GeoPoint; @@ -560,7 +560,7 @@ public class CompositeBucketsChangeCollector implements ChangeCollector { @Override public int getMaxPageSize() { // this collector is limited by indices.query.bool.max_clause_count, default 1024 - return BooleanQuery.getMaxClauseCount(); + return IndexSearcher.getMaxClauseCount(); } @Override diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java index 706337768a29..5f7c6490e51f 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/WatcherConcreteIndexTests.java @@ -50,7 +50,7 @@ public class WatcherConcreteIndexTests extends AbstractWatcherIntegrationTestCas assertBusy(() -> { assertResponse( prepareSearch(watchResultsIndex).setTrackTotalHits(true), - searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value, greaterThan(0)) + searchResponse -> assertThat((int) searchResponse.getHits().getTotalHits().value(), greaterThan(0)) ); }); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java index f1ad29607b5b..7fa5365afa0a 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/actions/TimeThrottleIntegrationTests.java @@ -120,7 +120,7 @@ public class TimeThrottleIntegrationTests extends AbstractWatcherIntegrationTest assertResponse( prepareSearch(HistoryStoreField.DATA_STREAM + "*").setSize(0) .setSource(new SearchSourceBuilder().query(QueryBuilders.boolQuery().must(termQuery("watch_id", id)))), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(oneOf(expectedCount, expectedCount + 1))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(oneOf(expectedCount, expectedCount + 1))) ); }); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java index 60867ba5d441..4068c534013b 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryActionConditionTests.java @@ -122,7 +122,7 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -176,7 +176,7 @@ public class HistoryActionConditionTests extends 
AbstractWatcherIntegrationTestC ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); @@ -236,7 +236,7 @@ public class HistoryActionConditionTests extends AbstractWatcherIntegrationTestC ensureGreen(HistoryStoreField.DATA_STREAM); final SearchResponse response = searchHistory(SearchSourceBuilder.searchSource().query(termQuery("watch_id", id))); try { - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); searchHitReference.set(response.getHits().getAt(0).asUnpooled()); } finally { response.decRef(); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java index 5b7ea39079f2..dac87eaa6f03 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateEmailMappingsTests.java @@ -105,7 +105,7 @@ public class HistoryTemplateEmailMappingsTests extends AbstractWatcherIntegratio ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java index 97347de1ea23..ffac36846414 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateHttpMappingsTests.java @@ -102,7 +102,7 @@ public class HistoryTemplateHttpMappingsTests extends AbstractWatcherIntegration ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java index 7dde279fb90d..8dec5287ae60 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateIndexActionMappingsTests.java @@ -54,7 
+54,7 @@ public class HistoryTemplateIndexActionMappingsTests extends AbstractWatcherInte ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java index 567d4acfa45e..b268caa45f47 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/history/HistoryTemplateSearchInputMappingsTests.java @@ -72,7 +72,7 @@ public class HistoryTemplateSearchInputMappingsTests extends AbstractWatcherInte ), response -> { assertThat(response, notNullValue()); - assertThat(response.getHits().getTotalHits().value, is(oneOf(1L, 2L))); + assertThat(response.getHits().getTotalHits().value(), is(oneOf(1L, 2L))); InternalAggregations aggs = response.getAggregations(); assertThat(aggs, notNullValue()); diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java index 5dc537fc259d..5eaf27e7b267 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/AbstractWatcherIntegrationTestCase.java @@ -377,7 +377,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase lastResponse.set(searchResponse); assertThat( "could not find executed watch record for watch " + watchName, - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(minimumExpectedWatchActionsWithActionPerformed) ); if (assertConditionMet) { @@ -396,7 +396,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -452,7 +452,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase searchResponse -> { lastResponse.set(searchResponse); assertThat( - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(expectedWatchActionsWithNoActionNeeded) ); } @@ -461,7 +461,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase } catch (AssertionError error) { SearchResponse searchResponse = lastResponse.get(); try { - logger.info("Found [{}] records for watch [{}]", 
searchResponse.getHits().getTotalHits().value, watchName); + logger.info("Found [{}] records for watch [{}]", searchResponse.getHits().getTotalHits().value(), watchName); int counter = 1; for (SearchHit hit : searchResponse.getHits().getHits()) { logger.info("hit [{}]=\n {}", counter++, XContentHelper.convertToJson(hit.getSourceRef(), true, true)); @@ -497,7 +497,7 @@ public abstract class AbstractWatcherIntegrationTestCase extends ESIntegTestCase searchResponse -> { assertThat( "could not find executed watch record", - searchResponse.getHits().getTotalHits().value, + searchResponse.getHits().getTotalHits().value(), greaterThanOrEqualTo(recordCount) ); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java index 99640d1ebc3e..03f1e6cb57eb 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/BootStrapTests.java @@ -296,8 +296,8 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { AtomicLong successfulWatchExecutions = new AtomicLong(); refresh(); assertResponse(prepareSearch("output"), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(numberOfWatches))); - successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value); + assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(numberOfWatches))); + successfulWatchExecutions.set(searchResponse.getHits().getTotalHits().value()); }); // the watch history should contain entries for each triggered watch, which a few have been marked as not executed @@ -378,7 +378,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase { // the actual documents are in the output index refresh(); assertResponse(prepareSearch(HistoryStoreField.DATA_STREAM).setSize(numRecords), searchResponse -> { - assertThat(searchResponse.getHits().getTotalHits().value, Matchers.equalTo((long) numRecords)); + assertThat(searchResponse.getHits().getTotalHits().value(), Matchers.equalTo((long) numRecords)); for (int i = 0; i < numRecords; i++) { assertThat(searchResponse.getHits().getAt(i).getSourceAsMap().get("state"), is(ExecutionState.EXECUTED.id())); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java index e5f4091ca89e..f3648580691c 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/RejectedExecutionTests.java @@ -55,7 +55,7 @@ public class RejectedExecutionTests extends AbstractWatcherIntegrationTestCase { assertBusy(() -> { flushAndRefresh(".watcher-history-*"); assertResponse(prepareSearch(".watcher-history-*"), searchResponse -> { - assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value, greaterThanOrEqualTo(2L)); + assertThat("Watcher history not found", searchResponse.getHits().getTotalHits().value(), 
greaterThanOrEqualTo(2L)); assertThat( "Did not find watcher history for rejected watch", Arrays.stream(searchResponse.getHits().getHits()) diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java index 7ff293ed9b15..fbb1996a4cf4 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/SingleNodeTests.java @@ -69,7 +69,7 @@ public class SingleNodeTests extends AbstractWatcherIntegrationTestCase { assertThat(refreshResponse.getStatus(), equalTo(RestStatus.OK)); assertResponse( prepareSearch(".watcher-history*").setSize(0), - searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, is(greaterThanOrEqualTo(1L))) + searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), is(greaterThanOrEqualTo(1L))) ); }, 30, TimeUnit.SECONDS); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java index 4298f641cbdd..e12805f3ace0 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/test/integration/WatchMetadataTests.java @@ -79,7 +79,7 @@ public class WatchMetadataTests extends AbstractWatcherIntegrationTestCase { } assertNotNull(searchResponse); try { - assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L)); + assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L)); } finally { searchResponse.decRef(); } diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java index 92ac91a63e09..2ec6541275d0 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/transform/TransformIntegrationTests.java @@ -139,13 +139,13 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key3").toString(), equalTo("20")); }); @@ 
-184,12 +184,12 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("mytestresult")); }); } @@ -223,13 +223,13 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas refresh(); assertNoFailuresAndResponse(prepareSearch("output1"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); assertNoFailuresAndResponse(prepareSearch("output2"), response -> { - assertThat(response.getHits().getTotalHits().value, greaterThanOrEqualTo(1L)); + assertThat(response.getHits().getTotalHits().value(), greaterThanOrEqualTo(1L)); assertThat(response.getHits().getAt(0).getSourceAsMap().size(), equalTo(1)); assertThat(response.getHits().getAt(0).getSourceAsMap().get("key4").toString(), equalTo("30")); }); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java index 5389f3421227..0ea9b432d3b0 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherService.java @@ -340,7 +340,7 @@ public class WatcherService { throw new ElasticsearchException("Partial response while loading watches"); } - if (response.getHits().getTotalHits().value == 0) { + if (response.getHits().getTotalHits().value() == 0) { return Collections.emptyList(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java index 9a165112c41d..327d345af864 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/common/http/HttpClient.java @@ -42,7 +42,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.CharacterRunAutomaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.cluster.service.ClusterService; @@ -440,7 +439,7 @@ public class HttpClient implements Closeable { } Automaton whiteListAutomaton = Regex.simpleMatchToAutomaton(whiteListedHosts.toArray(Strings.EMPTY_ARRAY)); - whiteListAutomaton = MinimizationOperations.minimize(whiteListAutomaton, 
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); + whiteListAutomaton = Operations.determinize(whiteListAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); return new CharacterRunAutomaton(whiteListAutomaton); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java index 6775dca424bf..dfa0c47493ed 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStore.java @@ -156,7 +156,7 @@ public class TriggeredWatchStore { SearchResponse response = null; try { response = client.search(searchRequest).actionGet(defaultSearchTimeout); - logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value); + logger.debug("trying to find triggered watches for ids {}: found [{}] docs", ids, response.getHits().getTotalHits().value()); while (response.getHits().getHits().length != 0) { for (SearchHit hit : response.getHits()) { Wid wid = new Wid(hit.getId()); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java index 9d6186e9c1c4..e6bd1b0efb95 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/search/ExecutableSearchInput.java @@ -91,7 +91,7 @@ public class ExecutableSearchInput extends ExecutableInput listener) { - assert searchResponse.getHits().getTotalHits().relation == TotalHits.Relation.EQUAL_TO; + assert searchResponse.getHits().getTotalHits().relation() == TotalHits.Relation.EQUAL_TO; List items = Arrays.stream(searchResponse.getHits().getHits()) .map(this::transformSearchHit) .toList(); - listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value, items)); + listener.onResponse(new QueryWatchesAction.Response(searchResponse.getHits().getTotalHits().value(), items)); } QueryWatchesAction.Response.Item transformSearchHit(SearchHit searchHit) { diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java index 608e5f197237..191775f46cd7 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/BinaryDvConfirmedAutomatonQuery.java @@ -18,6 +18,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryVisitor; import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.ScorerSupplier; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.util.BytesRef; @@ -69,44 +70,56 @@ public class BinaryDvConfirmedAutomatonQuery extends Query { return new ConstantScoreWeight(this, boost) { @Override - public Scorer scorer(LeafReaderContext context) throws IOException { + public ScorerSupplier 
scorerSupplier(LeafReaderContext context) throws IOException { ByteArrayStreamInput bytes = new ByteArrayStreamInput(); final BinaryDocValues values = DocValues.getBinary(context.reader(), field); - Scorer approxScorer = approxWeight.scorer(context); - if (approxScorer == null) { + ScorerSupplier approxScorerSupplier = approxWeight.scorerSupplier(context); + if (approxScorerSupplier == null) { // No matches to be had return null; } - DocIdSetIterator approxDisi = approxScorer.iterator(); - TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { - @Override - public boolean matches() throws IOException { - if (values.advanceExact(approxDisi.docID()) == false) { - // Can happen when approxQuery resolves to some form of MatchAllDocs expression - return false; - } - BytesRef arrayOfValues = values.binaryValue(); - bytes.reset(arrayOfValues.bytes); - bytes.setPosition(arrayOfValues.offset); - int size = bytes.readVInt(); - for (int i = 0; i < size; i++) { - int valLength = bytes.readVInt(); - if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { - return true; + return new ScorerSupplier() { + @Override + public Scorer get(long leadCost) throws IOException { + Scorer approxScorer = approxScorerSupplier.get(leadCost); + DocIdSetIterator approxDisi = approxScorer.iterator(); + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approxDisi) { + @Override + public boolean matches() throws IOException { + if (values.advanceExact(approxDisi.docID()) == false) { + // Can happen when approxQuery resolves to some form of MatchAllDocs expression + return false; + } + BytesRef arrayOfValues = values.binaryValue(); + bytes.reset(arrayOfValues.bytes); + bytes.setPosition(arrayOfValues.offset); + + int size = bytes.readVInt(); + for (int i = 0; i < size; i++) { + int valLength = bytes.readVInt(); + if (bytesMatcher.run(arrayOfValues.bytes, bytes.getPosition(), valLength)) { + return true; + } + bytes.skipBytes(valLength); + } + return false; } - bytes.skipBytes(valLength); - } - return false; + + @Override + public float matchCost() { + // TODO: how can we compute this? + return 1000f; + } + }; + return new ConstantScoreScorer(score(), scoreMode, twoPhase); } @Override - public float matchCost() { - // TODO: how can we compute this? - return 1000f; + public long cost() { + return approxScorerSupplier.cost(); } }; - return new ConstantScoreScorer(this, score(), scoreMode, twoPhase); } @Override diff --git a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java index 7784e7ffdda1..f3b01bb89812 100644 --- a/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java +++ b/x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java @@ -38,7 +38,6 @@ import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.search.WildcardQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; -import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; @@ -349,7 +348,7 @@ public class WildcardFieldMapper extends FieldMapper { } Automaton automaton = caseInsensitive ? 
AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term(name(), wildcardPattern)) - : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern)); + : WildcardQuery.toAutomaton(new Term(name(), wildcardPattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); if (clauseCount > 0) { // We can accelerate execution with the ngram query BooleanQuery approxQuery = rewritten.build(); @@ -379,7 +378,6 @@ public class WildcardFieldMapper extends FieldMapper { RegExp regExp = new RegExp(value, syntaxFlags, matchFlags); Automaton a = regExp.toAutomaton(); a = Operations.determinize(a, maxDeterminizedStates); - a = MinimizationOperations.minimize(a, maxDeterminizedStates); if (Operations.isTotal(a)) { // Will match all return existsQuery(context); } @@ -390,7 +388,7 @@ public class WildcardFieldMapper extends FieldMapper { Query approxNgramQuery = rewriteBoolToNgramQuery(approxBooleanQuery); RegExp regex = new RegExp(value, syntaxFlags, matchFlags); - Automaton automaton = regex.toAutomaton(maxDeterminizedStates); + Automaton automaton = Operations.determinize(regex.toAutomaton(), maxDeterminizedStates); // We can accelerate execution with the ngram query return new BinaryDvConfirmedAutomatonQuery(approxNgramQuery, name(), value, automaton); @@ -550,9 +548,9 @@ public class WildcardFieldMapper extends FieldMapper { BooleanQuery.Builder rewritten = new BooleanQuery.Builder(); int clauseCount = 0; for (BooleanClause clause : bq) { - Query q = rewriteBoolToNgramQuery(clause.getQuery()); + Query q = rewriteBoolToNgramQuery(clause.query()); if (q != null) { - if (clause.getOccur().equals(Occur.FILTER)) { + if (clause.occur().equals(Occur.FILTER)) { // Can't drop "should" clauses because it can elevate a sibling optional item // to mandatory (shoulds with 1 clause) causing false negatives // Dropping MUSTs increase false positives which are OK because are verified anyway. 
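For reference, the hunks above fold together several mechanical Lucene 10 API moves: Weight#scorer(LeafReaderContext) is replaced by Weight#scorerSupplier(LeafReaderContext), BooleanClause is now a record exposing query() and occur(), TotalHits is now a record exposing value() and relation(), and MinimizationOperations is removed in favour of an explicit Operations.determinize call. The sketches below show each pattern in isolation; they are editorial examples against stock Lucene 10 classes, and the class and method names (ScorerSupplierExample, TotalHitsExample, DeterminizeExample, countMatches, describe) are invented for illustration, not part of this patch.

A per-segment caller now asks the Weight for a ScorerSupplier and obtains the Scorer lazily, mirroring the BinaryDvConfirmedAutomatonQuery rewrite above:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;

final class ScorerSupplierExample {
    /** Counts matching docs in one segment via the Lucene 10 ScorerSupplier entry point. */
    static long countMatches(Weight weight, LeafReaderContext context) throws IOException {
        ScorerSupplier supplier = weight.scorerSupplier(context);
        if (supplier == null) {
            return 0; // no possible matches in this segment
        }
        // The lead cost is unknown here, so pass an upper bound; implementations may use it
        // to pick a cheaper execution strategy before building iterators.
        Scorer scorer = supplier.get(Long.MAX_VALUE);
        DocIdSetIterator disi = scorer.iterator();
        long count = 0;
        while (disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
            count++;
        }
        return count;
    }
}

The getTotalHits().value() and totalHits.value() changes that run through the Watcher and wildcard code follow from TotalHits becoming a record:

import org.apache.lucene.search.TotalHits;

final class TotalHitsExample {
    static String describe(TotalHits totalHits) {
        // Lucene 10: value() and relation() accessors replace the former public fields.
        String qualifier = totalHits.relation() == TotalHits.Relation.EQUAL_TO ? "exactly" : "at least";
        return qualifier + " " + totalHits.value() + " hits";
    }

    public static void main(String[] args) {
        System.out.println(describe(new TotalHits(42, TotalHits.Relation.EQUAL_TO)));
    }
}

And the HttpClient whitelist and WildcardFieldMapper automatons now stop at determinization, since a CharacterRunAutomaton only requires a deterministic automaton and Lucene 10 removed MinimizationOperations:

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

final class DeterminizeExample {
    public static void main(String[] args) {
        // RegExp#toAutomaton() no longer takes a work limit; determinize explicitly where
        // MinimizationOperations.minimize(...) used to be called.
        Automaton a = new RegExp("foo.*bar").toAutomaton();
        a = Operations.determinize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        CharacterRunAutomaton run = new CharacterRunAutomaton(a);
        System.out.println(run.run("foo-and-bar")); // prints true
    }
}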
@@ -561,7 +559,7 @@ public class WildcardFieldMapper extends FieldMapper { break; } } - rewritten.add(q, clause.getOccur()); + rewritten.add(q, clause.occur()); } } return rewritten.build(); diff --git a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java index 4b9ccff6f526..a1a01ebdcc59 100644 --- a/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java +++ b/x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java @@ -41,6 +41,7 @@ import org.apache.lucene.tests.store.BaseDirectoryWrapper; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.automaton.Automaton; import org.apache.lucene.util.automaton.ByteRunAutomaton; +import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -182,7 +183,7 @@ public class WildcardFieldMapperTests extends MapperTestCase { Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery("*a*", null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(1L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(1L)); reader.close(); dir.close(); @@ -229,12 +230,12 @@ public class WildcardFieldMapperTests extends MapperTestCase { String queryString = randomABString((IndexSearcher.getMaxClauseCount() * 2) + 1); Query wildcardFieldQuery = wildcardFieldType.fieldType().wildcardQuery(queryString, null, null); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); // Test regexp query wildcardFieldQuery = wildcardFieldType.fieldType().regexpQuery(queryString, RegExp.ALL, 0, 20000, null, MOCK_CONTEXT); wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.INDEXORDER); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(0L)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(0L)); reader.close(); dir.close(); @@ -271,13 +272,13 @@ public class WildcardFieldMapperTests extends MapperTestCase { private void expectTermMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().termQuery(term, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } private void expectPrefixMatch(IndexSearcher searcher, String term, long count) throws IOException { Query q = wildcardFieldType.fieldType().prefixQuery(term, null, MOCK_CONTEXT); TopDocs td = searcher.search(q, 10, Sort.RELEVANCE); - assertThat(td.totalHits.value, equalTo(count)); + assertThat(td.totalHits.value(), equalTo(count)); } public void testSearchResultsVersusKeywordField() throws IOException { @@ -390,8 +391,8 @@ public class WildcardFieldMapperTests extends MapperTestCase { TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, values.size() + 1, Sort.RELEVANCE); assertThat( keywordFieldQuery + "\n" + wildcardFieldQuery, - wildcardFieldTopDocs.totalHits.value, - 
equalTo(kwTopDocs.totalHits.value) + wildcardFieldTopDocs.totalHits.value(), + equalTo(kwTopDocs.totalHits.value()) ); HashSet expectedDocs = new HashSet<>(); @@ -497,7 +498,7 @@ public class WildcardFieldMapperTests extends MapperTestCase { TopDocs kwTopDocs = searcher.search(keywordFieldQuery, 10, Sort.RELEVANCE); TopDocs wildcardFieldTopDocs = searcher.search(wildcardFieldQuery, 10, Sort.RELEVANCE); - assertThat(wildcardFieldTopDocs.totalHits.value, equalTo(kwTopDocs.totalHits.value)); + assertThat(wildcardFieldTopDocs.totalHits.value(), equalTo(kwTopDocs.totalHits.value())); HashSet expectedDocs = new HashSet<>(); for (ScoreDoc topDoc : kwTopDocs.scoreDocs) { @@ -642,7 +643,7 @@ public class WildcardFieldMapperTests extends MapperTestCase { public void testQueryCachingEquality() throws IOException, ParseException { String pattern = "A*b*B?a"; // Case sensitivity matters when it comes to caching - Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton = WildcardQuery.toAutomaton(new Term("field", pattern), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); Automaton caseInSensitiveAutomaton = AutomatonQueries.toCaseInsensitiveWildcardAutomaton(new Term("field", pattern)); BinaryDvConfirmedAutomatonQuery csQ = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), @@ -660,7 +661,10 @@ public class WildcardFieldMapperTests extends MapperTestCase { assertNotEquals(csQ.hashCode(), ciQ.hashCode()); // Same query should be equal - Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton(new Term("field", pattern)); + Automaton caseSensitiveAutomaton2 = WildcardQuery.toAutomaton( + new Term("field", pattern), + Operations.DEFAULT_DETERMINIZE_WORK_LIMIT + ); BinaryDvConfirmedAutomatonQuery csQ2 = new BinaryDvConfirmedAutomatonQuery( new MatchAllDocsQuery(), "field", @@ -880,11 +884,11 @@ public class WildcardFieldMapperTests extends MapperTestCase { if (q instanceof BooleanQuery bq) { BooleanQuery.Builder result = new BooleanQuery.Builder(); for (BooleanClause cq : bq.clauses()) { - Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.getQuery()); - if (cq.getOccur() == Occur.FILTER) { + Query rewritten = rewriteFiltersToMustsForComparisonPurposes(cq.query()); + if (cq.occur() == Occur.FILTER) { result.add(rewritten, Occur.MUST); } else { - result.add(rewritten, cq.getOccur()); + result.add(rewritten, cq.occur()); } } return result.build(); @@ -1013,8 +1017,9 @@ public class WildcardFieldMapperTests extends MapperTestCase { } // Assert our randomly generated regex actually matches the provided raw input. 
- RegExp regex = new RegExp(result.toString()); - Automaton automaton = regex.toAutomaton(); + int includeDeprecatedComplement = RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT; + RegExp regex = new RegExp(result.toString(), includeDeprecatedComplement); + Automaton automaton = Operations.determinize(regex.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT); ByteRunAutomaton bytesMatcher = new ByteRunAutomaton(automaton); BytesRef br = new BytesRef(randomValue); assertTrue( diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index d61c143098fc..f502683e42eb 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -377,7 +377,7 @@ public class OldRepositoryAccessIT extends ESRestTestCase { try { logger.info(searchResponse); // check hit count - assertEquals(numDocs, searchResponse.getHits().getTotalHits().value); + assertEquals(numDocs, searchResponse.getHits().getTotalHits().value()); // check that _index is properly set assertTrue(Arrays.stream(searchResponse.getHits().getHits()).map(SearchHit::getIndex).allMatch(index::equals)); // check that all _ids are there @@ -404,7 +404,7 @@ public class OldRepositoryAccessIT extends ESRestTestCase { ); try { logger.info(searchResponse); - assertEquals(1, searchResponse.getHits().getTotalHits().value); + assertEquals(1, searchResponse.getHits().getTotalHits().value()); assertEquals(id, searchResponse.getHits().getHits()[0].getId()); assertEquals(sourceForDoc(num), searchResponse.getHits().getHits()[0].getSourceAsString()); } finally { @@ -456,7 +456,7 @@ public class OldRepositoryAccessIT extends ESRestTestCase { ); try { logger.info(searchResponse); - assertEquals(typeCount, searchResponse.getHits().getTotalHits().value); + assertEquals(typeCount, searchResponse.getHits().getTotalHits().value()); for (SearchHit hit : searchResponse.getHits().getHits()) { DocumentField typeField = hit.field("_type"); assertNotNull(typeField); @@ -482,7 +482,7 @@ public class OldRepositoryAccessIT extends ESRestTestCase { ); try { logger.info(searchResponse); - assertEquals(0, searchResponse.getHits().getTotalHits().value); + assertEquals(0, searchResponse.getHits().getTotalHits().value()); assertEquals(numberOfShards, searchResponse.getSuccessfulShards()); // When all shards are skipped, at least one of them is queried in order to provide a proper search response. 
assertEquals(numberOfShards - 1, searchResponse.getSkippedShards()); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index dddba9b7b0fb..02dc679152bf 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -445,7 +445,7 @@ public class TokenBackwardsCompatibilityIT extends AbstractUpgradeTestCase { final SearchHits searchHits = response.getHits(); assertThat( "Search request used with size parameter that was too small to fetch all tokens.", - searchHits.getTotalHits().value, + searchHits.getTotalHits().value(), lessThanOrEqualTo(searchSize) ); final List tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> {