Mirror of https://github.com/elastic/elasticsearch.git, synced 2025-04-25 07:37:19 -04:00
Upgrade to Lucene 10 (#114741)
The most relevant ES changes that upgrading to Lucene 10 requires are:

- use the appropriate IOContext
- Scorer / ScorerSupplier breaking changes
- regex automata are no longer determinized by default
- minimize moved to test classes
- introduce Elasticsearch900Codec
- adjust slicing code according to the added support for intra-segment concurrency
- disable intra-segment concurrency in tests
- adjust accessor methods for many Lucene classes that became records
- adapt to breaking changes in the analysis area

Co-authored-by: Christoph Büscher <christophbuescher@posteo.de>
Co-authored-by: Mayya Sharipova <mayya.sharipova@elastic.co>
Co-authored-by: ChrisHegarty <chegar999@gmail.com>
Co-authored-by: Brian Seeders <brian.seeders@elastic.co>
Co-authored-by: Armin Braun <me@obrown.io>
Co-authored-by: Panagiotis Bailis <pmpailis@gmail.com>
Co-authored-by: Benjamin Trent <4357155+benwtrent@users.noreply.github.com>
This commit is contained in:
  parent 671458a999
  commit 8efd08b019

662 changed files with 8805 additions and 3640 deletions
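One of the listed breaking changes, regex automata no longer being determinized by default, means any caller that needs a DFA must now determinize explicitly. A minimal sketch of that pattern (assuming the Lucene 10 RegExp/Operations API; this code is illustrative, not part of the commit):

    import org.apache.lucene.util.automaton.Automaton;
    import org.apache.lucene.util.automaton.Operations;
    import org.apache.lucene.util.automaton.RegExp;

    class DeterminizeExample {
        // Lucene 10: RegExp#toAutomaton() may return a non-deterministic automaton;
        // callers that need a DFA now have to call Operations.determinize themselves.
        static Automaton toDfa(String pattern) {
            Automaton nfa = new RegExp(pattern).toAutomaton();
            return Operations.determinize(nfa, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        }
    }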

@@ -56,7 +56,6 @@ steps:
       matrix:
         setup:
           BWC_VERSION:
-            - 7.17.13
             - 8.9.1
             - 8.10.0
       agents:

@@ -19,7 +19,7 @@ import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.MMapDirectory;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
 import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;
 import org.apache.lucene.util.quantization.ScalarQuantizer;
 import org.elasticsearch.common.logging.LogConfigurator;
 import org.elasticsearch.core.IOUtils;

@@ -217,19 +217,17 @@ public class VectorScorerBenchmark {
         return 1 / (1f + adjustedDistance);
     }

-    RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException {
+    QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException {
         var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7);
         var slice = in.slice("values", 0, in.length());
         return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice);
     }

-    RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim)
-        throws IOException {
+    RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException {
         return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values);
     }

-    RandomVectorScorer luceneScorer(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec)
-        throws IOException {
+    RandomVectorScorer luceneScorer(QuantizedByteVectorValues values, VectorSimilarityFunction sim, float[] queryVec) throws IOException {
         return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorer(sim, values, queryVec);
     }

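For orientation, the benchmark helpers above compose roughly as follows. This is a hedged sketch, not code from the diff: the dims/size/queryVec fields and the "vector.data" file name are assumptions standing in for the benchmark's actual setup.

    // Hedged sketch of how vectorValues(...) and luceneScorer(...) fit together.
    void scoreOnce() throws IOException {
        try (var dir = new MMapDirectory(Files.createTempDirectory("vec-bench"));
             var in = dir.openInput("vector.data", IOContext.DEFAULT)) { // assumes the file was written earlier
            QuantizedByteVectorValues values = vectorValues(dims, size, in, VectorSimilarityFunction.DOT_PRODUCT);
            RandomVectorScorer scorer = luceneScorer(values, VectorSimilarityFunction.DOT_PRODUCT, queryVec);
            float score = scorer.score(0); // score the query against vector ordinal 0
        }
    }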
@@ -59,10 +59,6 @@ org.apache.lucene.util.Version#parseLeniently(java.lang.String)

 org.apache.lucene.index.NoMergePolicy#INSTANCE @ explicit use of NoMergePolicy risks forgetting to configure NoMergeScheduler; use org.elasticsearch.common.lucene.Lucene#indexWriterConfigWithNoMerging() instead.

-@defaultMessage Spawns a new thread which is solely under lucenes control use ThreadPool#relativeTimeInMillis instead
-org.apache.lucene.search.TimeLimitingCollector#getGlobalTimerThread()
-org.apache.lucene.search.TimeLimitingCollector#getGlobalCounter()
-
 @defaultMessage Don't interrupt threads use FutureUtils#cancel(Future<T>) instead
 java.util.concurrent.Future#cancel(boolean)

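The replacement this rule points at bundles the no-merge policy together with the matching scheduler, so a test cannot end up with one but not the other. Roughly (a sketch assuming the no-arg helper signature named in the rule; the directory variable is illustrative):

    import org.apache.lucene.index.IndexWriter;
    import org.elasticsearch.common.lucene.Lucene;

    // Instead of IndexWriterConfig#setMergePolicy(NoMergePolicy.INSTANCE),
    // use the ES helper, which also wires in the no-merge scheduler:
    var writer = new IndexWriter(directory, Lucene.indexWriterConfigWithNoMerging());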
@@ -1,5 +1,5 @@
 elasticsearch = 9.0.0
-lucene = 9.12.0
+lucene = 10.0.0

 bundled_jdk_vendor = openjdk
 bundled_jdk = 22.0.1+8@c7ec1332f7bb44aeba2eb341ae18aca4

@@ -62,6 +62,9 @@
 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.setAsTypeCache
 23:-XX:CompileCommand=dontinline,java/lang/invoke/MethodHandle.asTypeUncached

+# Lucene 10: apply MADV_NORMAL advice to enable more aggressive readahead
+-Dorg.apache.lucene.store.defaultReadAdvice=normal
+
 ## heap dumps

 # generate a heap dump when an allocation from the Java heap fails; heap dumps

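The flag is a plain JVM system property consumed by Lucene's mmap directory layer. Outside of the bundled options file the same advice can, presumably, be set programmatically before any directory is opened (a hedged sketch; the index path is illustrative and the exact point at which Lucene reads the property is an assumption):

    import java.nio.file.Path;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.MMapDirectory;

    // Same effect as the jvm option above: request MADV_NORMAL readahead
    // before the first MMapDirectory is created.
    System.setProperty("org.apache.lucene.store.defaultReadAdvice", "normal");
    Directory dir = new MMapDirectory(Path.of("/path/to/index")); // illustrative path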
@@ -1,8 +1,8 @@

 include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[]

-:lucene_version: 9.12.0
-:lucene_version_path: 9_12_0
+:lucene_version: 10.0.0
+:lucene_version_path: 10_0_0
 :jdk: 11.0.2
 :jdk_major: 11
 :build_type: tar

docs/changelog/113482.yaml (new file, 27 lines):
pr: 113482
summary: The 'persian' analyzer has stemmer by default
area: Analysis
type: breaking
issues:
  - 113050
breaking:
  title: The 'persian' analyzer has stemmer by default
  area: Analysis
  details: >-
    Lucene 10 has added a final stemming step to its PersianAnalyzer that Elasticsearch
    exposes as 'persian' analyzer. Existing indices will keep the old
    non-stemming behaviour while new indices will see the updated behaviour with
    added stemming.
    Users that wish to maintain the non-stemming behaviour need to define their
    own analyzer as outlined in
    https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer.
    Users that wish to use the new stemming behaviour for existing indices will
    have to reindex their data.
  impact: >-
    Indexing with the 'persian' analyzer will produce slightly different tokens.
    Users should check if this impacts their search results. If they wish to
    maintain the legacy non-stemming behaviour they can define their own
    analyzer equivalent as explained in
    https://www.elastic.co/guide/en/elasticsearch/reference/8.15/analysis-lang-analyzer.html#persian-analyzer.
  notable: false

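The behaviour change can be observed at the Lucene level by running the analyzer directly; tokens now come out stemmed. A hedged sketch using Lucene's PersianAnalyzer (which backs the 'persian' analyzer; the sample text is illustrative):

    import org.apache.lucene.analysis.fa.PersianAnalyzer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    try (var analyzer = new PersianAnalyzer();
         var ts = analyzer.tokenStream("field", "کتاب‌ها")) { // sample Persian text
        CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            System.out.println(term); // stemmed under Lucene 10, unstemmed under Lucene 9
        }
        ts.end();
    }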
docs/changelog/113614.yaml (new file, 18 lines):
pr: 113614
summary: The 'german2' stemmer is now an alias for the 'german' snowball stemmer
area: Analysis
type: breaking
issues: []
breaking:
  title: The "german2" snowball stemmer is now an alias for the "german" stemmer
  area: Analysis
  details: >-
    Lucene 10 has merged the improved "german2" snowball language stemmer with the
    "german" stemmer. For Elasticsearch, "german2" is now a deprecated alias for
    "german". This may result in slightly different tokens being generated for
    terms with umlaut substitution (like "ue" for "ü", etc.)
  impact: >-
    Replace usages of "german2" with "german" in analysis configuration. Old
    indices that use the "german" stemmer should be reindexed if possible.
  notable: false

docs/changelog/114124.yaml (new file, 18 lines):
pr: 114124
summary: The Korean dictionary for Nori has been updated
area: Analysis
type: breaking
issues: []
breaking:
  title: The Korean dictionary for Nori has been updated
  area: Analysis
  details: >-
    Lucene 10 ships with an updated Korean dictionary (mecab-ko-dic-2.1.1).
    For details see https://github.com/apache/lucene/issues/11452. Users
    experiencing changes in search behaviour on existing data are advised to
    reindex.
  impact: >-
    The change is small and should generally provide better analysis results.
    Existing indices for full-text use cases should be reindexed though.
  notable: false

docs/changelog/114146.yaml (new file, 20 lines):
pr: 114146
summary: Snowball stemmers have been upgraded
area: Analysis
type: breaking
issues: []
breaking:
  title: Snowball stemmers have been upgraded
  area: Analysis
  details: >-
    Lucene 10 ships with an upgrade of its Snowball stemmers.
    For details see https://github.com/apache/lucene/issues/13209. Users using
    Snowball stemmers that are experiencing changes in search behaviour on
    existing data are advised to reindex.
  impact: >-
    The upgrade should generally provide improved stemming results. Small changes
    in token analysis can lead to mismatches with previously indexed data, so
    existing indices using Snowball stemmers as part of their analysis chain
    should be reindexed.
  notable: false

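Since several of these notes concern Snowball stemmers, a quick way to compare old and new output is to push a token stream through Lucene's SnowballFilter directly. A hedged sketch (the tokenizer choice and sample term are illustrative):

    import java.io.StringReader;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.WhitespaceTokenizer;
    import org.apache.lucene.analysis.snowball.SnowballFilter;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    var tokenizer = new WhitespaceTokenizer();
    tokenizer.setReader(new StringReader("häuser"));
    // "German" now also covers what used to be the separate "German2" variant.
    TokenStream ts = new SnowballFilter(tokenizer, "German");
    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
    ts.reset();
    while (ts.incrementToken()) {
        System.out.println(term); // compare output across Lucene 9 and Lucene 10
    }
    ts.end();
    ts.close();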
docs/changelog/114741.yaml (new file, 5 lines):
pr: 114741
summary: Upgrade to Lucene 10
area: Search
type: upgrade
issues: []

@@ -244,11 +244,11 @@ Which responds with:
         "end_offset": 3,
         "type": "word",
         "position": 1,
-        "leftPOS": "J(Ending Particle)",
+        "leftPOS": "JKS(Subject case marker)",
         "morphemes": null,
         "posType": "MORPHEME",
         "reading": null,
-        "rightPOS": "J(Ending Particle)"
+        "rightPOS": "JKS(Subject case marker)"
       },
       {
         "token": "깊",

@@ -268,11 +268,11 @@ Which responds with:
         "end_offset": 6,
         "type": "word",
         "position": 3,
-        "leftPOS": "E(Verbal endings)",
+        "leftPOS": "ETM(Adnominal form transformative ending)",
         "morphemes": null,
         "posType": "MORPHEME",
         "reading": null,
-        "rightPOS": "E(Verbal endings)"
+        "rightPOS": "ETM(Adnominal form transformative ending)"
       },
       {
         "token": "나무",

@@ -292,11 +292,11 @@ Which responds with:
         "end_offset": 10,
         "type": "word",
         "position": 5,
-        "leftPOS": "J(Ending Particle)",
+        "leftPOS": "JX(Auxiliary postpositional particle)",
         "morphemes": null,
         "posType": "MORPHEME",
         "reading": null,
-        "rightPOS": "J(Ending Particle)"
+        "rightPOS": "JX(Auxiliary postpositional particle)"
       }
     ]
   },

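The more specific POS tags come straight from the updated mecab-ko-dic dictionary and can be reproduced at the Lucene level. A hedged sketch using the Nori analyzer and its part-of-speech attribute (assuming the lucene-analysis-nori API; the sample text mirrors the docs example):

    import org.apache.lucene.analysis.ko.KoreanAnalyzer;
    import org.apache.lucene.analysis.ko.tokenattributes.PartOfSpeechAttribute;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

    try (var analyzer = new KoreanAnalyzer();
         var ts = analyzer.tokenStream("field", "뿌리가 깊은 나무")) {
        var term = ts.addAttribute(CharTermAttribute.class);
        var pos = ts.addAttribute(PartOfSpeechAttribute.class);
        ts.reset();
        while (ts.incrementToken()) {
            // prints e.g. JKS / ETM / JX tags under the updated dictionary
            System.out.println(term + " left=" + pos.getLeftPOS() + " right=" + pos.getRightPOS());
        }
        ts.end();
    }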
@@ -1430,7 +1430,8 @@ PUT /persian_example
           "decimal_digit",
           "arabic_normalization",
           "persian_normalization",
-          "persian_stop"
+          "persian_stop",
+          "persian_stem"
         ]
       }
     }

@@ -173,7 +173,6 @@ http://bvg.udc.es/recursos_lingua/stemming.jsp[`minimal_galician`] (Plural step only)

 German::
 https://dl.acm.org/citation.cfm?id=1141523[*`light_german`*],
 https://snowballstem.org/algorithms/german/stemmer.html[`german`],
-https://snowballstem.org/algorithms/german2/stemmer.html[`german2`],
 http://members.unine.ch/jacques.savoy/clef/morpho.pdf[`minimal_german`]

 Greek::

@@ -40,14 +40,14 @@ POST _analyze
       "start_offset": 0,
       "end_offset": 8,
       "type": "word",
-      "position": 0
+      "position": 1
     },
     {
       "token": "/one/two/three",
       "start_offset": 0,
       "end_offset": 14,
       "type": "word",
-      "position": 0
+      "position": 2
     }
   ]
 }

@@ -144,14 +144,14 @@ POST my-index-000001/_analyze
       "start_offset": 7,
       "end_offset": 18,
       "type": "word",
-      "position": 0
+      "position": 1
     },
     {
       "token": "/three/four/five",
       "start_offset": 7,
       "end_offset": 23,
       "type": "word",
-      "position": 0
+      "position": 2
     }
   ]
 }

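The change above is that path tokens, which previously all sat at position 0, now get incrementing positions. This can be observed directly on the underlying Lucene tokenizer (a hedged sketch using PathHierarchyTokenizer, which backs the ES path_hierarchy tokenizer):

    import java.io.StringReader;
    import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

    var tok = new PathHierarchyTokenizer();
    tok.setReader(new StringReader("/one/two/three"));
    CharTermAttribute term = tok.addAttribute(CharTermAttribute.class);
    PositionIncrementAttribute posInc = tok.addAttribute(PositionIncrementAttribute.class);
    tok.reset();
    int position = -1;
    while (tok.incrementToken()) {
        position += posInc.getPositionIncrement();
        System.out.println(term + " @ position " + position); // 0, 1, 2 under Lucene 10
    }
    tok.end();
    tok.close();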
@@ -1298,7 +1298,7 @@ One of the `dfs.knn` sections for a shard looks like the following:
     "query" : [
       {
         "type" : "DocAndScoreQuery",
-        "description" : "DocAndScore[100]",
+        "description" : "DocAndScoreQuery[0,...][0.008961825,...],0.008961825",
         "time_in_nanos" : 444414,
         "breakdown" : {
           "set_min_competitive_score_count" : 0,

@@ -2824,129 +2824,129 @@
          <sha256 value="015d5c229f3cd5c0ebf175c1da08d596d94043362ae9d92637d88848c90537c8" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-common" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-common" version="10.0.0">
-      <artifact name="lucene-analysis-common-9.12.0.jar">
+      <artifact name="lucene-analysis-common-10.0.0.jar">
-         <sha256 value="8c79d8741f711cc6d7501805b03f7b5f505805d09cab1beb95f0be24b6d27655" origin="Generated by Gradle"/>
+         <sha256 value="af703beb0898514a1c2d8dd58ac94fa14a6275398288482e1b9cd85f07d56912" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-icu" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-icu" version="10.0.0">
-      <artifact name="lucene-analysis-icu-9.12.0.jar">
+      <artifact name="lucene-analysis-icu-10.0.0.jar">
-         <sha256 value="52e914cc2d50beec8af0d07f50e5dab803cb9ce8e675ffe43a2aae6102fe6e25" origin="Generated by Gradle"/>
+         <sha256 value="0c69ea85db9be3daf6ed692f01e8467ab9b8e8fcc3740169bfbb601e7c5d55e5" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-kuromoji" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-kuromoji" version="10.0.0">
-      <artifact name="lucene-analysis-kuromoji-9.12.0.jar">
+      <artifact name="lucene-analysis-kuromoji-10.0.0.jar">
-         <sha256 value="3da0bd68c68cfe518d0e87ea49af1a9560579bca6b9a7fdf642169cb76dd6887" origin="Generated by Gradle"/>
+         <sha256 value="c12cb66f91752c5aa4969ad1a7775d268e5dec3d5e6e78b107fad2a02e897aac" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-morfologik" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-morfologik" version="10.0.0">
-      <artifact name="lucene-analysis-morfologik-9.12.0.jar">
+      <artifact name="lucene-analysis-morfologik-10.0.0.jar">
-         <sha256 value="920358e33d806b5c3feb3566795714e1f36a616520bd2bd5da1e8b639c1f8549" origin="Generated by Gradle"/>
+         <sha256 value="99aa16783155c122833f61c84726eedce41b1b0778a861e84d853a3e5397fc6d" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-nori" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-nori" version="10.0.0">
-      <artifact name="lucene-analysis-nori-9.12.0.jar">
+      <artifact name="lucene-analysis-nori-10.0.0.jar">
-         <sha256 value="26cacbe1d75e138ac0ba1e392edb1877c10a422f98524bf51d5d77492272889d" origin="Generated by Gradle"/>
+         <sha256 value="5e80fa011ef360f2093fec157959d63af7766c13490f5d32fa3e82260edcf54e" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-phonetic" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-phonetic" version="10.0.0">
-      <artifact name="lucene-analysis-phonetic-9.12.0.jar">
+      <artifact name="lucene-analysis-phonetic-10.0.0.jar">
-         <sha256 value="4bb24414425e04ccd28c835e8dd7f52259185f8f671aea6432460b4b7afa5cfa" origin="Generated by Gradle"/>
+         <sha256 value="901320e21ce7a15548323be1628df9dc96d83ebca4e0bfa71b3d771bfff5a7bb" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-smartcn" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-smartcn" version="10.0.0">
-      <artifact name="lucene-analysis-smartcn-9.12.0.jar">
+      <artifact name="lucene-analysis-smartcn-10.0.0.jar">
-         <sha256 value="ffd68804d3387968784b7f52b7c14c7439fa474ffbf1b36d93fa1d2b04101f7a" origin="Generated by Gradle"/>
+         <sha256 value="4d41a52e24f16fb7e9d96371362bf5bba5ae308c2740753fd1ff5e938800c6b3" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-analysis-stempel" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-analysis-stempel" version="10.0.0">
-      <artifact name="lucene-analysis-stempel-9.12.0.jar">
+      <artifact name="lucene-analysis-stempel-10.0.0.jar">
-         <sha256 value="c4911b5697c0403b694aa84d3f63090067c74ffa8c95cb088d0b80e91166df4a" origin="Generated by Gradle"/>
+         <sha256 value="723ca9d879d341283cf2dbe3c6bdf5a13875165aaba6a197dc669152bf0c2047" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-backward-codecs" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-backward-codecs" version="10.0.0">
-      <artifact name="lucene-backward-codecs-9.12.0.jar">
+      <artifact name="lucene-backward-codecs-10.0.0.jar">
-         <sha256 value="a42c17f304c9bf1e8411ebdcf91baf2df2d00c7bfa13bca9e41ccf381d78cfba" origin="Generated by Gradle"/>
+         <sha256 value="a5439e70d0e0dbc6745354b30b3168eb39f5ea452cc7187a81d30f080f4e87e4" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-codecs" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-codecs" version="10.0.0">
-      <artifact name="lucene-codecs-9.12.0.jar">
+      <artifact name="lucene-codecs-10.0.0.jar">
-         <sha256 value="8173a45b87df23a6dd279916cf16b361c0dcfeff5927121b014d210b19f17555" origin="Generated by Gradle"/>
+         <sha256 value="517bd716e6773346b2a8e9f32a72c10b006c63ff6c27733d4808deb41cf01bda" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-core" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-core" version="10.0.0">
-      <artifact name="lucene-core-9.12.0.jar">
+      <artifact name="lucene-core-10.0.0.jar">
-         <sha256 value="6c7b774b75cd8f369e246f365a47caa54ae991cae6afa49c7f339e9921ca58a0" origin="Generated by Gradle"/>
+         <sha256 value="ab187a10018d1e889624ef9e60b6fff0349c6315965b5087bdad0243a735f941" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-expressions" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-expressions" version="10.0.0">
-      <artifact name="lucene-expressions-9.12.0.jar">
+      <artifact name="lucene-expressions-10.0.0.jar">
-         <sha256 value="9d14919a3d0f07420c1d57a68de52180ae2477d6255d7f693f44515a4fda9c9b" origin="Generated by Gradle"/>
+         <sha256 value="4b1b9ffaf2ca65ec57e4af52b76fcbfc975b31d5c836ba343ab471f7d884963e" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-facet" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-facet" version="10.0.0">
-      <artifact name="lucene-facet-9.12.0.jar">
+      <artifact name="lucene-facet-10.0.0.jar">
-         <sha256 value="3c07754f0a1f45ba42a637faa8568c18507defa6b77a1ed71af291cd4e7ac603" origin="Generated by Gradle"/>
+         <sha256 value="a760708d9ba1da12803d69a3027e5960b0cf8d6c05f7649716015dfe2a49ea77" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-grouping" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-grouping" version="10.0.0">
-      <artifact name="lucene-grouping-9.12.0.jar">
+      <artifact name="lucene-grouping-10.0.0.jar">
-         <sha256 value="ffc02d047f4f036c5a5f409baaf8e19abca165deccc3e57e7a422560f4de1219" origin="Generated by Gradle"/>
+         <sha256 value="311b875f88cebcdc93a4104d51302eb8c1c278dbcda8a4ae79467a634d4272dc" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-highlighter" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-highlighter" version="10.0.0">
-      <artifact name="lucene-highlighter-9.12.0.jar">
+      <artifact name="lucene-highlighter-10.0.0.jar">
-         <sha256 value="5094e1b38f02eb72500a4283ac0f654e2e81943c5359b629567dd7900c935a59" origin="Generated by Gradle"/>
+         <sha256 value="74a2e01e94b040dcaf4b5e989a0a262f2a6b41f41bc2b17e11c9bca3e2b239bb" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-join" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-join" version="10.0.0">
-      <artifact name="lucene-join-9.12.0.jar">
+      <artifact name="lucene-join-10.0.0.jar">
-         <sha256 value="c49cb1be843295b2c7158c03755f222416fad9915114bc7f74649d20f4fc43d6" origin="Generated by Gradle"/>
+         <sha256 value="19b8326e56693824bbb1335d29e378d3288cd97add5c61538b502fb8666dd033" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-memory" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-memory" version="10.0.0">
-      <artifact name="lucene-memory-9.12.0.jar">
+      <artifact name="lucene-memory-10.0.0.jar">
-         <sha256 value="fdbb9c9b3270ce06370d094f1a80dbbbf81b2638b55fa14cc817469092899f01" origin="Generated by Gradle"/>
+         <sha256 value="40eb3a15025bd54ac6715a7f0f68ad3d0130d8a1226398b2d8fd2c7b82538cb5" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-misc" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-misc" version="10.0.0">
-      <artifact name="lucene-misc-9.12.0.jar">
+      <artifact name="lucene-misc-10.0.0.jar">
-         <sha256 value="7989d094b4b168ce9ef2949e13f6bfe820829944e50831881be1adeddbe02c04" origin="Generated by Gradle"/>
+         <sha256 value="ef3217c4fe4a294b98a122d6e252eb1d43801201369850345d6d995f1fc7a7b7" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-queries" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-queries" version="10.0.0">
-      <artifact name="lucene-queries-9.12.0.jar">
+      <artifact name="lucene-queries-10.0.0.jar">
-         <sha256 value="3b4da6b1673e4edd141c7b86538cc61f6ff3386106f1f5e179c9fe2d30ea2fca" origin="Generated by Gradle"/>
+         <sha256 value="2b0372c2d19dac6fb0eb7bc9ccf2dfe7211954d7c82a77c00d15bb5c2ae4196d" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-queryparser" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-queryparser" version="10.0.0">
-      <artifact name="lucene-queryparser-9.12.0.jar">
+      <artifact name="lucene-queryparser-10.0.0.jar">
-         <sha256 value="fc866b7bbfc199436feb24be7dd5c575445342eb283bfe07dd519eab131d3675" origin="Generated by Gradle"/>
+         <sha256 value="29aff63cd8cd8ce6e8db1eeadf4b9634d5688ad1591866e23d6d9059afaf27a0" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-sandbox" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-sandbox" version="10.0.0">
-      <artifact name="lucene-sandbox-9.12.0.jar">
+      <artifact name="lucene-sandbox-10.0.0.jar">
-         <sha256 value="2262f39a241f435dba61f230e7528205c34f4bf0008faa57f0808476bb782996" origin="Generated by Gradle"/>
+         <sha256 value="58bd8fb5f700b270b9971762269ef209f4c6b9eeadc7ff907b7e15cf6463740a" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-spatial-extras" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-spatial-extras" version="10.0.0">
-      <artifact name="lucene-spatial-extras-9.12.0.jar">
+      <artifact name="lucene-spatial-extras-10.0.0.jar">
-         <sha256 value="ea41de3b2da573e488033b8b1c2033e17c27ce39957cf6acb8904297ae6e1747" origin="Generated by Gradle"/>
+         <sha256 value="e6e7708f51bd5a9ccd37774170a420478011c22e57b262242e207010962e8f01" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-spatial3d" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-spatial3d" version="10.0.0">
-      <artifact name="lucene-spatial3d-9.12.0.jar">
+      <artifact name="lucene-spatial3d-10.0.0.jar">
-         <sha256 value="4263e163937024838a8761618b1ce4eba06857adca0c9385d1ac014ec2937de6" origin="Generated by Gradle"/>
+         <sha256 value="594738cc1b97dfc7dcd856fb10d7ebbeacf9ef14aa658555e4298ae4b6794978" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-suggest" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-suggest" version="10.0.0">
-      <artifact name="lucene-suggest-9.12.0.jar">
+      <artifact name="lucene-suggest-10.0.0.jar">
-         <sha256 value="1606fb9c7cf57d88b2824916c00280c92e14bcde798a6b78b27e0db8f13227f0" origin="Generated by Gradle"/>
+         <sha256 value="662851cf788fd76d846d89c1c9243e956af767402275924bb538468e7aeb35bc" origin="Generated by Gradle"/>
       </artifact>
    </component>
-   <component group="org.apache.lucene" name="lucene-test-framework" version="9.12.0">
+   <component group="org.apache.lucene" name="lucene-test-framework" version="10.0.0">
-      <artifact name="lucene-test-framework-9.12.0.jar">
+      <artifact name="lucene-test-framework-10.0.0.jar">
-         <sha256 value="47d4e1f47ebd26117d47af8fbff471c116c571fd7d7bd6cb199d89f8454e5cf0" origin="Generated by Gradle"/>
+         <sha256 value="8bd7bd94136f5879a9748ab12c2b263a5519d8ce813bfc40d40f5bf086a7bebd" origin="Generated by Gradle"/>
       </artifact>
    </component>
    <component group="org.apache.maven" name="maven-artifact" version="3.6.1">

@@ -13,7 +13,7 @@ import org.apache.lucene.index.VectorSimilarityFunction;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
 import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;

 import java.util.Optional;

@@ -39,7 +39,7 @@ public interface VectorScorerFactory {
     Optional<RandomVectorScorerSupplier> getInt7SQVectorScorerSupplier(
         VectorSimilarityType similarityType,
         IndexInput input,
-        RandomAccessQuantizedByteVectorValues values,
+        QuantizedByteVectorValues values,
         float scoreCorrectionConstant
     );

@@ -52,9 +52,5 @@ public interface VectorScorerFactory {
      * @param queryVector the query vector
      * @return an optional containing the vector scorer, or empty
      */
-    Optional<RandomVectorScorer> getInt7SQVectorScorer(
-        VectorSimilarityFunction sim,
-        RandomAccessQuantizedByteVectorValues values,
-        float[] queryVector
-    );
+    Optional<RandomVectorScorer> getInt7SQVectorScorer(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector);
 }

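A caller-side view of the narrowed interface, as a hedged sketch (the factory, values, and queryVector variables are assumptions; acquiring a VectorScorerFactory instance is platform-dependent and not shown in this diff):

    // Assumes an enclosing method that declares IOException, since
    // RandomVectorScorer#score(int) can throw it.
    Optional<RandomVectorScorer> maybeScorer =
        factory.getInt7SQVectorScorer(VectorSimilarityFunction.DOT_PRODUCT, values, queryVector);
    if (maybeScorer.isPresent()) {
        float score = maybeScorer.get().score(0); // query vs. vector ordinal 0
    }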
@@ -13,7 +13,7 @@ import org.apache.lucene.index.VectorSimilarityFunction;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
 import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;

 import java.util.Optional;

@@ -25,7 +25,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory {
     public Optional<RandomVectorScorerSupplier> getInt7SQVectorScorerSupplier(
         VectorSimilarityType similarityType,
         IndexInput input,
-        RandomAccessQuantizedByteVectorValues values,
+        QuantizedByteVectorValues values,
         float scoreCorrectionConstant
     ) {
         throw new UnsupportedOperationException("should not reach here");

@@ -34,7 +34,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory {
     @Override
     public Optional<RandomVectorScorer> getInt7SQVectorScorer(
         VectorSimilarityFunction sim,
-        RandomAccessQuantizedByteVectorValues values,
+        QuantizedByteVectorValues values,
         float[] queryVector
     ) {
         throw new UnsupportedOperationException("should not reach here");

@@ -15,7 +15,7 @@ import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.MemorySegmentAccessInput;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
 import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;
 import org.elasticsearch.nativeaccess.NativeAccess;
 import org.elasticsearch.simdvec.internal.Int7SQVectorScorer;
 import org.elasticsearch.simdvec.internal.Int7SQVectorScorerSupplier.DotProductSupplier;

@@ -38,7 +38,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory {
     public Optional<RandomVectorScorerSupplier> getInt7SQVectorScorerSupplier(
         VectorSimilarityType similarityType,
         IndexInput input,
-        RandomAccessQuantizedByteVectorValues values,
+        QuantizedByteVectorValues values,
         float scoreCorrectionConstant
     ) {
         input = FilterIndexInput.unwrapOnlyTest(input);

@@ -57,7 +57,7 @@ final class VectorScorerFactoryImpl implements VectorScorerFactory {
     @Override
     public Optional<RandomVectorScorer> getInt7SQVectorScorer(
         VectorSimilarityFunction sim,
-        RandomAccessQuantizedByteVectorValues values,
+        QuantizedByteVectorValues values,
         float[] queryVector
     ) {
         return Int7SQVectorScorer.create(sim, values, queryVector);

@@ -11,18 +11,14 @@ package org.elasticsearch.simdvec.internal;

 import org.apache.lucene.index.VectorSimilarityFunction;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;

 import java.util.Optional;

 public final class Int7SQVectorScorer {

     // Unconditionally returns an empty optional on <= JDK 21, since the scorer is only supported on JDK 22+
-    public static Optional<RandomVectorScorer> create(
-        VectorSimilarityFunction sim,
-        RandomAccessQuantizedByteVectorValues values,
-        float[] queryVector
-    ) {
+    public static Optional<RandomVectorScorer> create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) {
         return Optional.empty();
     }

@@ -12,7 +12,7 @@ package org.elasticsearch.simdvec.internal;
 import org.apache.lucene.store.MemorySegmentAccessInput;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
 import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;
 import org.apache.lucene.util.quantization.ScalarQuantizedVectorSimilarity;

 import java.io.IOException;

@@ -31,12 +31,12 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorScorerSupplier
     final int maxOrd;
     final float scoreCorrectionConstant;
     final MemorySegmentAccessInput input;
-    final RandomAccessQuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds
+    final QuantizedByteVectorValues values; // to support ordToDoc/getAcceptOrds
     final ScalarQuantizedVectorSimilarity fallbackScorer;

     protected Int7SQVectorScorerSupplier(
         MemorySegmentAccessInput input,
-        RandomAccessQuantizedByteVectorValues values,
+        QuantizedByteVectorValues values,
         float scoreCorrectionConstant,
         ScalarQuantizedVectorSimilarity fallbackScorer
     ) {

@@ -104,11 +104,7 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorScorerSupplier

     public static final class EuclideanSupplier extends Int7SQVectorScorerSupplier {

-        public EuclideanSupplier(
-            MemorySegmentAccessInput input,
-            RandomAccessQuantizedByteVectorValues values,
-            float scoreCorrectionConstant
-        ) {
+        public EuclideanSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) {
             super(input, values, scoreCorrectionConstant, fromVectorSimilarity(EUCLIDEAN, scoreCorrectionConstant, BITS));
         }

@@ -127,11 +123,7 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorScorerSupplier

     public static final class DotProductSupplier extends Int7SQVectorScorerSupplier {

-        public DotProductSupplier(
-            MemorySegmentAccessInput input,
-            RandomAccessQuantizedByteVectorValues values,
-            float scoreCorrectionConstant
-        ) {
+        public DotProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) {
             super(input, values, scoreCorrectionConstant, fromVectorSimilarity(DOT_PRODUCT, scoreCorrectionConstant, BITS));
         }

@@ -151,11 +143,7 @@ public abstract sealed class Int7SQVectorScorerSupplier implements RandomVectorScorerSupplier

     public static final class MaxInnerProductSupplier extends Int7SQVectorScorerSupplier {

-        public MaxInnerProductSupplier(
-            MemorySegmentAccessInput input,
-            RandomAccessQuantizedByteVectorValues values,
-            float scoreCorrectionConstant
-        ) {
+        public MaxInnerProductSupplier(MemorySegmentAccessInput input, QuantizedByteVectorValues values, float scoreCorrectionConstant) {
             super(input, values, scoreCorrectionConstant, fromVectorSimilarity(MAXIMUM_INNER_PRODUCT, scoreCorrectionConstant, BITS));
         }

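The supplier exists so that concurrent HNSW searches can each pull their own scorer. Usage is roughly as follows, a hedged sketch assuming the Lucene 9/10-era RandomVectorScorerSupplier contract of scorer(ord) followed by score(ord); the input/values/scoreCorrectionConstant variables are assumptions:

    // One scorer per search thread, anchored on a query-side ordinal.
    RandomVectorScorerSupplier supplier = new DotProductSupplier(input, values, scoreCorrectionConstant);
    RandomVectorScorer scorer = supplier.scorer(42); // everything is compared against ordinal 42
    float s0 = scorer.score(0);
    float s1 = scorer.score(1);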
@@ -15,7 +15,7 @@ import org.apache.lucene.store.FilterIndexInput;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.MemorySegmentAccessInput;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;
 import org.apache.lucene.util.quantization.ScalarQuantizer;

 import java.io.IOException;

@@ -35,11 +35,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer
     byte[] scratch;

     /** Return an optional whose value, if present, is the scorer. Otherwise, an empty optional is returned. */
-    public static Optional<RandomVectorScorer> create(
-        VectorSimilarityFunction sim,
-        RandomAccessQuantizedByteVectorValues values,
-        float[] queryVector
-    ) {
+    public static Optional<RandomVectorScorer> create(VectorSimilarityFunction sim, QuantizedByteVectorValues values, float[] queryVector) {
         checkDimensions(queryVector.length, values.dimension());
         var input = values.getSlice();
         if (input == null) {

@@ -63,12 +59,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer
         };
     }

-    Int7SQVectorScorer(
-        MemorySegmentAccessInput input,
-        RandomAccessQuantizedByteVectorValues values,
-        byte[] queryVector,
-        float queryCorrection
-    ) {
+    Int7SQVectorScorer(MemorySegmentAccessInput input, QuantizedByteVectorValues values, byte[] queryVector, float queryCorrection) {
         super(values);
         this.input = input;
         assert queryVector.length == values.getVectorByteLength();

@@ -105,7 +96,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer
     }

     public static final class DotProductScorer extends Int7SQVectorScorer {
-        public DotProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) {
+        public DotProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) {
             super(in, values, query, correction);
         }

@@ -122,7 +113,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer
     }

     public static final class EuclideanScorer extends Int7SQVectorScorer {
-        public EuclideanScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float correction) {
+        public EuclideanScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float correction) {
             super(in, values, query, correction);
         }

@@ -136,7 +127,7 @@ public abstract sealed class Int7SQVectorScorer extends RandomVectorScorer.AbstractRandomVectorScorer
     }

     public static final class MaxInnerProductScorer extends Int7SQVectorScorer {
-        public MaxInnerProductScorer(MemorySegmentAccessInput in, RandomAccessQuantizedByteVectorValues values, byte[] query, float corr) {
+        public MaxInnerProductScorer(MemorySegmentAccessInput in, QuantizedByteVectorValues values, byte[] query, float corr) {
             super(in, values, query, corr);
         }

@@ -21,7 +21,7 @@ import org.apache.lucene.store.IndexOutput;
 import org.apache.lucene.store.MMapDirectory;
 import org.apache.lucene.util.hnsw.RandomVectorScorer;
 import org.apache.lucene.util.hnsw.RandomVectorScorerSupplier;
-import org.apache.lucene.util.quantization.RandomAccessQuantizedByteVectorValues;
+import org.apache.lucene.util.quantization.QuantizedByteVectorValues;
 import org.apache.lucene.util.quantization.ScalarQuantizer;

 import java.io.IOException;

@@ -431,14 +431,13 @@ public class VectorScorerFactoryTests extends AbstractVectorTestCase {
         }
     }

-    RandomAccessQuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException {
+    QuantizedByteVectorValues vectorValues(int dims, int size, IndexInput in, VectorSimilarityFunction sim) throws IOException {
         var sq = new ScalarQuantizer(0.1f, 0.9f, (byte) 7);
         var slice = in.slice("values", 0, in.length());
         return new OffHeapQuantizedByteVectorValues.DenseOffHeapVectorValues(dims, size, sq, false, sim, null, slice);
     }

-    RandomVectorScorerSupplier luceneScoreSupplier(RandomAccessQuantizedByteVectorValues values, VectorSimilarityFunction sim)
-        throws IOException {
+    RandomVectorScorerSupplier luceneScoreSupplier(QuantizedByteVectorValues values, VectorSimilarityFunction sim) throws IOException {
         return new Lucene99ScalarQuantizedVectorScorer(null).getRandomVectorScorerSupplier(sim, values);
     }

@@ -33,3 +33,7 @@ dependencies {
 artifacts {
     restTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test"))
 }
+
+tasks.named("yamlRestCompatTestTransform").configure { task ->
+    task.replaceValueInMatch("tokens.0.token", "absenț", "romanian")
+}

LegacyRomanianStemmer.java (new file, 741 lines):

/*
 * @notice
 * Generated by Snowball 2.0.0 - https://snowballstem.org/
 *
 * Modifications copyright (C) 2024 Elasticsearch B.V.
 */

package org.elasticsearch.analysis.common;

import org.tartarus.snowball.Among;

/**
 * This class implements the stemming algorithm defined by a snowball script.
 * NOTE: This is the RomanianStemmer used in Lucene 9 and should only be used for backwards compatibility
 */
@SuppressWarnings("checkstyle:DescendantToken")
class LegacyRomanianStemmer extends org.tartarus.snowball.SnowballStemmer {

    private static final java.lang.invoke.MethodHandles.Lookup methodObject = java.lang.invoke.MethodHandles.lookup();

    private static final Among a_0[] = { new Among("", -1, 3), new Among("I", 0, 1), new Among("U", 0, 2) };

    private static final Among a_1[] = {
        new Among("ea", -1, 3), new Among("a\u0163ia", -1, 7), new Among("aua", -1, 2), new Among("iua", -1, 4),
        new Among("a\u0163ie", -1, 7), new Among("ele", -1, 3), new Among("ile", -1, 5), new Among("iile", 6, 4),
        new Among("iei", -1, 4), new Among("atei", -1, 6), new Among("ii", -1, 4), new Among("ului", -1, 1),
        new Among("ul", -1, 1), new Among("elor", -1, 3), new Among("ilor", -1, 4), new Among("iilor", 14, 4) };

    private static final Among a_2[] = {
        new Among("icala", -1, 4), new Among("iciva", -1, 4), new Among("ativa", -1, 5), new Among("itiva", -1, 6),
        new Among("icale", -1, 4), new Among("a\u0163iune", -1, 5), new Among("i\u0163iune", -1, 6), new Among("atoare", -1, 5),
        new Among("itoare", -1, 6), new Among("\u0103toare", -1, 5), new Among("icitate", -1, 4), new Among("abilitate", -1, 1),
        new Among("ibilitate", -1, 2), new Among("ivitate", -1, 3), new Among("icive", -1, 4), new Among("ative", -1, 5),
        new Among("itive", -1, 6), new Among("icali", -1, 4), new Among("atori", -1, 5), new Among("icatori", 18, 4),
        new Among("itori", -1, 6), new Among("\u0103tori", -1, 5), new Among("icitati", -1, 4), new Among("abilitati", -1, 1),
        new Among("ivitati", -1, 3), new Among("icivi", -1, 4), new Among("ativi", -1, 5), new Among("itivi", -1, 6),
        new Among("icit\u0103i", -1, 4), new Among("abilit\u0103i", -1, 1), new Among("ivit\u0103i", -1, 3), new Among("icit\u0103\u0163i", -1, 4),
        new Among("abilit\u0103\u0163i", -1, 1), new Among("ivit\u0103\u0163i", -1, 3), new Among("ical", -1, 4), new Among("ator", -1, 5),
        new Among("icator", 35, 4), new Among("itor", -1, 6), new Among("\u0103tor", -1, 5), new Among("iciv", -1, 4),
        new Among("ativ", -1, 5), new Among("itiv", -1, 6), new Among("ical\u0103", -1, 4), new Among("iciv\u0103", -1, 4),
        new Among("ativ\u0103", -1, 5), new Among("itiv\u0103", -1, 6) };

    private static final Among a_3[] = {
        new Among("ica", -1, 1), new Among("abila", -1, 1), new Among("ibila", -1, 1), new Among("oasa", -1, 1),
        new Among("ata", -1, 1), new Among("ita", -1, 1), new Among("anta", -1, 1), new Among("ista", -1, 3),
        new Among("uta", -1, 1), new Among("iva", -1, 1), new Among("ic", -1, 1), new Among("ice", -1, 1),
        new Among("abile", -1, 1), new Among("ibile", -1, 1), new Among("isme", -1, 3), new Among("iune", -1, 2),
        new Among("oase", -1, 1), new Among("ate", -1, 1), new Among("itate", 17, 1), new Among("ite", -1, 1),
        new Among("ante", -1, 1), new Among("iste", -1, 3), new Among("ute", -1, 1), new Among("ive", -1, 1),
        new Among("ici", -1, 1), new Among("abili", -1, 1), new Among("ibili", -1, 1), new Among("iuni", -1, 2),
        new Among("atori", -1, 1), new Among("osi", -1, 1), new Among("ati", -1, 1), new Among("itati", 30, 1),
        new Among("iti", -1, 1), new Among("anti", -1, 1), new Among("isti", -1, 3), new Among("uti", -1, 1),
        new Among("i\u015Fti", -1, 3), new Among("ivi", -1, 1), new Among("it\u0103i", -1, 1), new Among("o\u015Fi", -1, 1),
        new Among("it\u0103\u0163i", -1, 1), new Among("abil", -1, 1), new Among("ibil", -1, 1), new Among("ism", -1, 3),
        new Among("ator", -1, 1), new Among("os", -1, 1), new Among("at", -1, 1), new Among("it", -1, 1),
        new Among("ant", -1, 1), new Among("ist", -1, 3), new Among("ut", -1, 1), new Among("iv", -1, 1),
        new Among("ic\u0103", -1, 1), new Among("abil\u0103", -1, 1), new Among("ibil\u0103", -1, 1), new Among("oas\u0103", -1, 1),
        new Among("at\u0103", -1, 1), new Among("it\u0103", -1, 1), new Among("ant\u0103", -1, 1), new Among("ist\u0103", -1, 3),
        new Among("ut\u0103", -1, 1), new Among("iv\u0103", -1, 1) };

    private static final Among a_4[] = {
        new Among("ea", -1, 1), new Among("ia", -1, 1), new Among("esc", -1, 1), new Among("\u0103sc", -1, 1),
        new Among("ind", -1, 1), new Among("\u00E2nd", -1, 1), new Among("are", -1, 1), new Among("ere", -1, 1),
        new Among("ire", -1, 1), new Among("\u00E2re", -1, 1), new Among("se", -1, 2), new Among("ase", 10, 1),
        new Among("sese", 10, 2), new Among("ise", 10, 1), new Among("use", 10, 1), new Among("\u00E2se", 10, 1),
        new Among("e\u015Fte", -1, 1), new Among("\u0103\u015Fte", -1, 1), new Among("eze", -1, 1), new Among("ai", -1, 1),
        new Among("eai", 19, 1), new Among("iai", 19, 1), new Among("sei", -1, 2), new Among("e\u015Fti", -1, 1),
        new Among("\u0103\u015Fti", -1, 1), new Among("ui", -1, 1), new Among("ezi", -1, 1), new Among("\u00E2i", -1, 1),
        new Among("a\u015Fi", -1, 1), new Among("se\u015Fi", -1, 2), new Among("ase\u015Fi", 29, 1), new Among("sese\u015Fi", 29, 2),
        new Among("ise\u015Fi", 29, 1), new Among("use\u015Fi", 29, 1), new Among("\u00E2se\u015Fi", 29, 1), new Among("i\u015Fi", -1, 1),
        new Among("u\u015Fi", -1, 1), new Among("\u00E2\u015Fi", -1, 1), new Among("a\u0163i", -1, 2), new Among("ea\u0163i", 38, 1),
        new Among("ia\u0163i", 38, 1), new Among("e\u0163i", -1, 2), new Among("i\u0163i", -1, 2), new Among("\u00E2\u0163i", -1, 2),
        new Among("ar\u0103\u0163i", -1, 1), new Among("ser\u0103\u0163i", -1, 2), new Among("aser\u0103\u0163i", 45, 1), new Among("seser\u0103\u0163i", 45, 2),
        new Among("iser\u0103\u0163i", 45, 1), new Among("user\u0103\u0163i", 45, 1), new Among("\u00E2ser\u0103\u0163i", 45, 1), new Among("ir\u0103\u0163i", -1, 1),
        new Among("ur\u0103\u0163i", -1, 1), new Among("\u00E2r\u0103\u0163i", -1, 1), new Among("am", -1, 1), new Among("eam", 54, 1),
        new Among("iam", 54, 1), new Among("em", -1, 2), new Among("asem", 57, 1), new Among("sesem", 57, 2),
        new Among("isem", 57, 1), new Among("usem", 57, 1), new Among("\u00E2sem", 57, 1), new Among("im", -1, 2),
        new Among("\u00E2m", -1, 2), new Among("\u0103m", -1, 2), new Among("ar\u0103m", 65, 1), new Among("ser\u0103m", 65, 2),
        new Among("aser\u0103m", 67, 1), new Among("seser\u0103m", 67, 2), new Among("iser\u0103m", 67, 1), new Among("user\u0103m", 67, 1),
        new Among("\u00E2ser\u0103m", 67, 1), new Among("ir\u0103m", 65, 1), new Among("ur\u0103m", 65, 1), new Among("\u00E2r\u0103m", 65, 1),
        new Among("au", -1, 1), new Among("eau", 76, 1), new Among("iau", 76, 1), new Among("indu", -1, 1),
        new Among("\u00E2ndu", -1, 1), new Among("ez", -1, 1), new Among("easc\u0103", -1, 1), new Among("ar\u0103", -1, 1),
        new Among("ser\u0103", -1, 2), new Among("aser\u0103", 84, 1), new Among("seser\u0103", 84, 2), new Among("iser\u0103", 84, 1),
        new Among("user\u0103", 84, 1),
|
||||||
|
new Among("\u00E2ser\u0103", 84, 1),
|
||||||
|
new Among("ir\u0103", -1, 1),
|
||||||
|
new Among("ur\u0103", -1, 1),
|
||||||
|
new Among("\u00E2r\u0103", -1, 1),
|
||||||
|
new Among("eaz\u0103", -1, 1) };
|
||||||
|
|
||||||
|
private static final Among a_5[] = {
|
||||||
|
new Among("a", -1, 1),
|
||||||
|
new Among("e", -1, 1),
|
||||||
|
new Among("ie", 1, 1),
|
||||||
|
new Among("i", -1, 1),
|
||||||
|
new Among("\u0103", -1, 1) };
|
||||||
|
|
||||||
|
private static final char g_v[] = { 17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 32, 0, 0, 4 };
|
||||||
|
|
||||||
|
private boolean B_standard_suffix_removed;
|
||||||
|
private int I_p2;
|
||||||
|
private int I_p1;
|
||||||
|
private int I_pV;
|
||||||
|
|
||||||
|
private boolean r_prelude() {
|
||||||
|
while (true) {
|
||||||
|
int v_1 = cursor;
|
||||||
|
lab0: {
|
||||||
|
golab1: while (true) {
|
||||||
|
int v_2 = cursor;
|
||||||
|
lab2: {
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
lab3: {
|
||||||
|
int v_3 = cursor;
|
||||||
|
lab4: {
|
||||||
|
if (!(eq_s("u"))) {
|
||||||
|
break lab4;
|
||||||
|
}
|
||||||
|
ket = cursor;
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab4;
|
||||||
|
}
|
||||||
|
slice_from("U");
|
||||||
|
break lab3;
|
||||||
|
}
|
||||||
|
cursor = v_3;
|
||||||
|
if (!(eq_s("i"))) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
ket = cursor;
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
slice_from("I");
|
||||||
|
}
|
||||||
|
cursor = v_2;
|
||||||
|
break golab1;
|
||||||
|
}
|
||||||
|
cursor = v_2;
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
cursor = v_1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_mark_regions() {
|
||||||
|
I_pV = limit;
|
||||||
|
I_p1 = limit;
|
||||||
|
I_p2 = limit;
|
||||||
|
int v_1 = cursor;
|
||||||
|
lab0: {
|
||||||
|
lab1: {
|
||||||
|
int v_2 = cursor;
|
||||||
|
lab2: {
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
lab3: {
|
||||||
|
int v_3 = cursor;
|
||||||
|
lab4: {
|
||||||
|
if (!(out_grouping(g_v, 97, 259))) {
|
||||||
|
break lab4;
|
||||||
|
}
|
||||||
|
golab5: while (true) {
|
||||||
|
lab6: {
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab6;
|
||||||
|
}
|
||||||
|
break golab5;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab4;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
break lab3;
|
||||||
|
}
|
||||||
|
cursor = v_3;
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
golab7: while (true) {
|
||||||
|
lab8: {
|
||||||
|
if (!(out_grouping(g_v, 97, 259))) {
|
||||||
|
break lab8;
|
||||||
|
}
|
||||||
|
break golab7;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break lab1;
|
||||||
|
}
|
||||||
|
cursor = v_2;
|
||||||
|
if (!(out_grouping(g_v, 97, 259))) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
lab9: {
|
||||||
|
int v_6 = cursor;
|
||||||
|
lab10: {
|
||||||
|
if (!(out_grouping(g_v, 97, 259))) {
|
||||||
|
break lab10;
|
||||||
|
}
|
||||||
|
golab11: while (true) {
|
||||||
|
lab12: {
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab12;
|
||||||
|
}
|
||||||
|
break golab11;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab10;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
break lab9;
|
||||||
|
}
|
||||||
|
cursor = v_6;
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
I_pV = cursor;
|
||||||
|
}
|
||||||
|
cursor = v_1;
|
||||||
|
int v_8 = cursor;
|
||||||
|
lab13: {
|
||||||
|
golab14: while (true) {
|
||||||
|
lab15: {
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab15;
|
||||||
|
}
|
||||||
|
break golab14;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab13;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
golab16: while (true) {
|
||||||
|
lab17: {
|
||||||
|
if (!(out_grouping(g_v, 97, 259))) {
|
||||||
|
break lab17;
|
||||||
|
}
|
||||||
|
break golab16;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab13;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
I_p1 = cursor;
|
||||||
|
golab18: while (true) {
|
||||||
|
lab19: {
|
||||||
|
if (!(in_grouping(g_v, 97, 259))) {
|
||||||
|
break lab19;
|
||||||
|
}
|
||||||
|
break golab18;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab13;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
golab20: while (true) {
|
||||||
|
lab21: {
|
||||||
|
if (!(out_grouping(g_v, 97, 259))) {
|
||||||
|
break lab21;
|
||||||
|
}
|
||||||
|
break golab20;
|
||||||
|
}
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab13;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
}
|
||||||
|
I_p2 = cursor;
|
||||||
|
}
|
||||||
|
cursor = v_8;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_postlude() {
|
||||||
|
int among_var;
|
||||||
|
while (true) {
|
||||||
|
int v_1 = cursor;
|
||||||
|
lab0: {
|
||||||
|
bra = cursor;
|
||||||
|
among_var = find_among(a_0);
|
||||||
|
if (among_var == 0) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
ket = cursor;
|
||||||
|
switch (among_var) {
|
||||||
|
case 1:
|
||||||
|
slice_from("i");
|
||||||
|
break;
|
||||||
|
case 2:
|
||||||
|
slice_from("u");
|
||||||
|
break;
|
||||||
|
case 3:
|
||||||
|
if (cursor >= limit) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
cursor++;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
cursor = v_1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_RV() {
|
||||||
|
if (!(I_pV <= cursor)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_R1() {
|
||||||
|
if (!(I_p1 <= cursor)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_R2() {
|
||||||
|
if (!(I_p2 <= cursor)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_step_0() {
|
||||||
|
int among_var;
|
||||||
|
ket = cursor;
|
||||||
|
among_var = find_among_b(a_1);
|
||||||
|
if (among_var == 0) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
if (!r_R1()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
switch (among_var) {
|
||||||
|
case 1:
|
||||||
|
slice_del();
|
||||||
|
break;
|
||||||
|
case 2:
|
||||||
|
slice_from("a");
|
||||||
|
break;
|
||||||
|
case 3:
|
||||||
|
slice_from("e");
|
||||||
|
break;
|
||||||
|
case 4:
|
||||||
|
slice_from("i");
|
||||||
|
break;
|
||||||
|
case 5: {
|
||||||
|
int v_1 = limit - cursor;
|
||||||
|
lab0: {
|
||||||
|
if (!(eq_s_b("ab"))) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
cursor = limit - v_1;
|
||||||
|
}
|
||||||
|
slice_from("i");
|
||||||
|
break;
|
||||||
|
case 6:
|
||||||
|
slice_from("at");
|
||||||
|
break;
|
||||||
|
case 7:
|
||||||
|
slice_from("a\u0163i");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_combo_suffix() {
|
||||||
|
int among_var;
|
||||||
|
int v_1 = limit - cursor;
|
||||||
|
ket = cursor;
|
||||||
|
among_var = find_among_b(a_2);
|
||||||
|
if (among_var == 0) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
if (!r_R1()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
switch (among_var) {
|
||||||
|
case 1:
|
||||||
|
slice_from("abil");
|
||||||
|
break;
|
||||||
|
case 2:
|
||||||
|
slice_from("ibil");
|
||||||
|
break;
|
||||||
|
case 3:
|
||||||
|
slice_from("iv");
|
||||||
|
break;
|
||||||
|
case 4:
|
||||||
|
slice_from("ic");
|
||||||
|
break;
|
||||||
|
case 5:
|
||||||
|
slice_from("at");
|
||||||
|
break;
|
||||||
|
case 6:
|
||||||
|
slice_from("it");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
B_standard_suffix_removed = true;
|
||||||
|
cursor = limit - v_1;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_standard_suffix() {
|
||||||
|
int among_var;
|
||||||
|
B_standard_suffix_removed = false;
|
||||||
|
while (true) {
|
||||||
|
int v_1 = limit - cursor;
|
||||||
|
lab0: {
|
||||||
|
if (!r_combo_suffix()) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
cursor = limit - v_1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
ket = cursor;
|
||||||
|
among_var = find_among_b(a_3);
|
||||||
|
if (among_var == 0) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
if (!r_R2()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
switch (among_var) {
|
||||||
|
case 1:
|
||||||
|
slice_del();
|
||||||
|
break;
|
||||||
|
case 2:
|
||||||
|
if (!(eq_s_b("\u0163"))) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
slice_from("t");
|
||||||
|
break;
|
||||||
|
case 3:
|
||||||
|
slice_from("ist");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
B_standard_suffix_removed = true;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_verb_suffix() {
|
||||||
|
int among_var;
|
||||||
|
if (cursor < I_pV) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
int v_2 = limit_backward;
|
||||||
|
limit_backward = I_pV;
|
||||||
|
ket = cursor;
|
||||||
|
among_var = find_among_b(a_4);
|
||||||
|
if (among_var == 0) {
|
||||||
|
limit_backward = v_2;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
switch (among_var) {
|
||||||
|
case 1:
|
||||||
|
lab0: {
|
||||||
|
int v_3 = limit - cursor;
|
||||||
|
lab1: {
|
||||||
|
if (!(out_grouping_b(g_v, 97, 259))) {
|
||||||
|
break lab1;
|
||||||
|
}
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
cursor = limit - v_3;
|
||||||
|
if (!(eq_s_b("u"))) {
|
||||||
|
limit_backward = v_2;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
slice_del();
|
||||||
|
break;
|
||||||
|
case 2:
|
||||||
|
slice_del();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
limit_backward = v_2;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean r_vowel_suffix() {
|
||||||
|
ket = cursor;
|
||||||
|
if (find_among_b(a_5) == 0) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
bra = cursor;
|
||||||
|
if (!r_RV()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
slice_del();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean stem() {
|
||||||
|
int v_1 = cursor;
|
||||||
|
r_prelude();
|
||||||
|
cursor = v_1;
|
||||||
|
r_mark_regions();
|
||||||
|
limit_backward = cursor;
|
||||||
|
cursor = limit;
|
||||||
|
int v_3 = limit - cursor;
|
||||||
|
r_step_0();
|
||||||
|
cursor = limit - v_3;
|
||||||
|
int v_4 = limit - cursor;
|
||||||
|
r_standard_suffix();
|
||||||
|
cursor = limit - v_4;
|
||||||
|
int v_5 = limit - cursor;
|
||||||
|
lab0: {
|
||||||
|
lab1: {
|
||||||
|
int v_6 = limit - cursor;
|
||||||
|
lab2: {
|
||||||
|
if (!(B_standard_suffix_removed)) {
|
||||||
|
break lab2;
|
||||||
|
}
|
||||||
|
break lab1;
|
||||||
|
}
|
||||||
|
cursor = limit - v_6;
|
||||||
|
if (!r_verb_suffix()) {
|
||||||
|
break lab0;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cursor = limit - v_5;
|
||||||
|
int v_7 = limit - cursor;
|
||||||
|
r_vowel_suffix();
|
||||||
|
cursor = limit - v_7;
|
||||||
|
cursor = limit_backward;
|
||||||
|
int v_8 = cursor;
|
||||||
|
r_postlude();
|
||||||
|
cursor = v_8;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public boolean equals(Object o) {
|
||||||
|
return o instanceof LegacyRomanianStemmer;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public int hashCode() {
|
||||||
|
return LegacyRomanianStemmer.class.getName().hashCode();
|
||||||
|
}
|
||||||
|
}
|
|
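The class above is the Snowball-generated legacy Romanian stemmer kept so that indices created before the Lucene 10 upgrade keep their old stemming behaviour. A minimal sketch of wiring it into a token stream through SnowballFilter, the same wiring RomanianAnalyzerProvider uses further below; the whitespace tokenizer, helper name, and sample text are illustrative assumptions, not part of this commit:

    // Sketch: apply LegacyRomanianStemmer to a token stream (illustrative wiring).
    static TokenStream legacyRomanianStream(String text) {
        Tokenizer tokenizer = new WhitespaceTokenizer();   // any tokenizer works here
        tokenizer.setReader(new StringReader(text));
        // SnowballFilter calls the generated stem() once per token
        return new SnowballFilter(tokenizer, new LegacyRomanianStemmer());
    }

Consuming this stream for "absenţa" should produce the old cedilla-form stem "absenţ", matching the pre-Lucene-10 expectations in the tests further below.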
@@ -9,24 +9,72 @@

 package org.elasticsearch.analysis.common;

+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.LowerCaseFilter;
+import org.apache.lucene.analysis.StopFilter;
+import org.apache.lucene.analysis.StopwordAnalyzerBase;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.ar.ArabicNormalizationFilter;
+import org.apache.lucene.analysis.core.DecimalDigitFilter;
 import org.apache.lucene.analysis.fa.PersianAnalyzer;
+import org.apache.lucene.analysis.fa.PersianCharFilter;
+import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
+import org.apache.lucene.analysis.standard.StandardTokenizer;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
 import org.elasticsearch.index.analysis.Analysis;

-public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider<PersianAnalyzer> {
+import java.io.Reader;

-    private final PersianAnalyzer analyzer;
+public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider<StopwordAnalyzerBase> {
+
+    private final StopwordAnalyzerBase analyzer;

     PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(name, settings);
-        analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet()));
+        if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) {
+            // since Lucene 10 this analyzer contains stemming by default
+            analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet()));
+        } else {
+            // for older index versions we need the old analyzer behaviour without stemming
+            analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet())) {
+
+                protected Analyzer.TokenStreamComponents createComponents(String fieldName) {
+                    final Tokenizer source = new StandardTokenizer();
+                    TokenStream result = new LowerCaseFilter(source);
+                    result = new DecimalDigitFilter(result);
+                    result = new ArabicNormalizationFilter(result);
+                    /* additional persian-specific normalization */
+                    result = new PersianNormalizationFilter(result);
+                    /*
+                     * the order here is important: the stopword list is normalized with the
+                     * above!
+                     */
+                    return new TokenStreamComponents(source, new StopFilter(result, stopwords));
+                }
+
+                protected TokenStream normalize(String fieldName, TokenStream in) {
+                    TokenStream result = new LowerCaseFilter(in);
+                    result = new DecimalDigitFilter(result);
+                    result = new ArabicNormalizationFilter(result);
+                    /* additional persian-specific normalization */
+                    result = new PersianNormalizationFilter(result);
+                    return result;
+                }
+
+                protected Reader initReader(String fieldName, Reader reader) {
+                    return new PersianCharFilter(reader);
+                }
+            };
+        }
     }

     @Override
-    public PersianAnalyzer get() {
+    public StopwordAnalyzerBase get() {
         return this.analyzer;
     }
 }
@@ -9,28 +9,60 @@

 package org.elasticsearch.analysis.common;

+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.CharArraySet;
+import org.apache.lucene.analysis.StopwordAnalyzerBase;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.Tokenizer;
+import org.apache.lucene.analysis.core.LowerCaseFilter;
+import org.apache.lucene.analysis.core.StopFilter;
+import org.apache.lucene.analysis.miscellaneous.SetKeywordMarkerFilter;
 import org.apache.lucene.analysis.ro.RomanianAnalyzer;
+import org.apache.lucene.analysis.snowball.SnowballFilter;
+import org.apache.lucene.analysis.standard.StandardTokenizer;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
 import org.elasticsearch.index.analysis.Analysis;

-public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<RomanianAnalyzer> {
+public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<StopwordAnalyzerBase> {

-    private final RomanianAnalyzer analyzer;
+    private final StopwordAnalyzerBase analyzer;

     RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
         super(name, settings);
-        analyzer = new RomanianAnalyzer(
-            Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()),
-            Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET)
-        );
+        CharArraySet stopwords = Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet());
+        CharArraySet stemExclusionSet = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET);
+        if (indexSettings.getIndexVersionCreated().onOrAfter(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)) {
+            // since Lucene 10, this analyzer uses a modern unicode form and normalizes cedilla forms to forms with commas
+            analyzer = new RomanianAnalyzer(stopwords, stemExclusionSet);
+        } else {
+            // for older index versions we need the old behaviour without normalization
+            analyzer = new StopwordAnalyzerBase(Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet())) {
+
+                protected Analyzer.TokenStreamComponents createComponents(String fieldName) {
+                    final Tokenizer source = new StandardTokenizer();
+                    TokenStream result = new LowerCaseFilter(source);
+                    result = new StopFilter(result, stopwords);
+                    if (stemExclusionSet.isEmpty() == false) {
+                        result = new SetKeywordMarkerFilter(result, stemExclusionSet);
+                    }
+                    result = new SnowballFilter(result, new LegacyRomanianStemmer());
+                    return new TokenStreamComponents(source, result);
+                }
+
+                protected TokenStream normalize(String fieldName, TokenStream in) {
+                    return new LowerCaseFilter(in);
+                }
+            };
+        }
     }

     @Override
-    public RomanianAnalyzer get() {
+    public StopwordAnalyzerBase get() {
         return this.analyzer;
     }
 }
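The glyphs in this hunk's comment are visually confusable, so here is the concrete difference being described: the Romanian cedilla letters are normalized to the preferred comma-below letters by the new analyzer. A short sketch pinning down the code points (plain Java; the stem strings are taken from the tests later in this diff):

    // ş U+015F / ţ U+0163 are the legacy cedilla forms;
    // ș U+0219 / ț U+021B are the comma-below forms Lucene 10 normalizes to.
    assert "absenţ".charAt(5) == '\u0163';   // old stem ends in t with cedilla
    assert "absenț".charAt(5) == '\u021B';   // new stem ends in t with comma below

The REST test expectation changes near the end of this diff (absenţ becomes absenț) are exactly this one-code-point difference.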
@@ -9,6 +9,7 @@

 package org.elasticsearch.analysis.common;

+import org.apache.lucene.analysis.TokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.ar.ArabicStemFilter;
 import org.apache.lucene.analysis.bg.BulgarianStemFilter;

@@ -38,8 +39,9 @@ import org.apache.lucene.analysis.it.ItalianLightStemFilter;
 import org.apache.lucene.analysis.lv.LatvianStemFilter;
 import org.apache.lucene.analysis.miscellaneous.EmptyTokenStream;
 import org.apache.lucene.analysis.no.NorwegianLightStemFilter;
-import org.apache.lucene.analysis.no.NorwegianLightStemmer;
+import org.apache.lucene.analysis.no.NorwegianLightStemFilterFactory;
 import org.apache.lucene.analysis.no.NorwegianMinimalStemFilter;
+import org.apache.lucene.analysis.no.NorwegianMinimalStemFilterFactory;
 import org.apache.lucene.analysis.pt.PortugueseLightStemFilter;
 import org.apache.lucene.analysis.pt.PortugueseMinimalStemFilter;
 import org.apache.lucene.analysis.pt.PortugueseStemFilter;

@@ -62,14 +64,11 @@ import org.tartarus.snowball.ext.EnglishStemmer;
 import org.tartarus.snowball.ext.EstonianStemmer;
 import org.tartarus.snowball.ext.FinnishStemmer;
 import org.tartarus.snowball.ext.FrenchStemmer;
-import org.tartarus.snowball.ext.German2Stemmer;
 import org.tartarus.snowball.ext.GermanStemmer;
 import org.tartarus.snowball.ext.HungarianStemmer;
 import org.tartarus.snowball.ext.IrishStemmer;
 import org.tartarus.snowball.ext.ItalianStemmer;
-import org.tartarus.snowball.ext.KpStemmer;
 import org.tartarus.snowball.ext.LithuanianStemmer;
-import org.tartarus.snowball.ext.LovinsStemmer;
 import org.tartarus.snowball.ext.NorwegianStemmer;
 import org.tartarus.snowball.ext.PortugueseStemmer;
 import org.tartarus.snowball.ext.RomanianStemmer;

@@ -80,6 +79,7 @@ import org.tartarus.snowball.ext.SwedishStemmer;
 import org.tartarus.snowball.ext.TurkishStemmer;

 import java.io.IOException;
+import java.util.Collections;

 public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {

@@ -87,27 +87,15 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {

     private static final TokenStream EMPTY_TOKEN_STREAM = new EmptyTokenStream();

-    private String language;
+    private final String language;

+    private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(StemmerTokenFilterFactory.class);

     StemmerTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
         super(name, settings);
         this.language = Strings.capitalize(settings.get("language", settings.get("name", "porter")));
         // check that we have a valid language by trying to create a TokenStream
         create(EMPTY_TOKEN_STREAM).close();
-        if ("lovins".equalsIgnoreCase(language)) {
-            deprecationLogger.critical(
-                DeprecationCategory.ANALYSIS,
-                "lovins_deprecation",
-                "The [lovins] stemmer is deprecated and will be removed in a future version."
-            );
-        }
-        if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) {
-            deprecationLogger.critical(
-                DeprecationCategory.ANALYSIS,
-                "dutch_kp_deprecation",
-                "The [dutch_kp] stemmer is deprecated and will be removed in a future version."
-            );
-        }
     }

     @Override

@@ -135,8 +123,17 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
         } else if ("dutch".equalsIgnoreCase(language)) {
             return new SnowballFilter(tokenStream, new DutchStemmer());
         } else if ("dutch_kp".equalsIgnoreCase(language) || "dutchKp".equalsIgnoreCase(language) || "kp".equalsIgnoreCase(language)) {
-            return new SnowballFilter(tokenStream, new KpStemmer());
+            deprecationLogger.critical(
+                DeprecationCategory.ANALYSIS,
+                "dutch_kp_deprecation",
+                "The [dutch_kp] stemmer is deprecated and will be removed in a future version."
+            );
+            return new TokenFilter(tokenStream) {
+                @Override
+                public boolean incrementToken() {
+                    return false;
+                }
+            };
             // English stemmers
         } else if ("english".equalsIgnoreCase(language)) {
             return new PorterStemFilter(tokenStream);

@@ -145,7 +142,17 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
             || "kstem".equalsIgnoreCase(language)) {
             return new KStemFilter(tokenStream);
         } else if ("lovins".equalsIgnoreCase(language)) {
-            return new SnowballFilter(tokenStream, new LovinsStemmer());
+            deprecationLogger.critical(
+                DeprecationCategory.ANALYSIS,
+                "lovins_deprecation",
+                "The [lovins] stemmer is deprecated and will be removed in a future version."
+            );
+            return new TokenFilter(tokenStream) {
+                @Override
+                public boolean incrementToken() {
+                    return false;
+                }
+            };
         } else if ("porter".equalsIgnoreCase(language)) {
             return new PorterStemFilter(tokenStream);
         } else if ("porter2".equalsIgnoreCase(language)) {

@@ -185,7 +192,13 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {
         } else if ("german".equalsIgnoreCase(language)) {
             return new SnowballFilter(tokenStream, new GermanStemmer());
         } else if ("german2".equalsIgnoreCase(language)) {
-            return new SnowballFilter(tokenStream, new German2Stemmer());
+            DEPRECATION_LOGGER.critical(
+                DeprecationCategory.ANALYSIS,
+                "german2_stemmer_deprecation",
+                "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. "
+                    + "Replace all usages of 'german2' with 'german'."
+            );
+            return new SnowballFilter(tokenStream, new GermanStemmer());
         } else if ("light_german".equalsIgnoreCase(language) || "lightGerman".equalsIgnoreCase(language)) {
             return new GermanLightStemFilter(tokenStream);
         } else if ("minimal_german".equalsIgnoreCase(language) || "minimalGerman".equalsIgnoreCase(language)) {

@@ -231,10 +244,13 @@ public class StemmerTokenFilterFactory extends AbstractTokenFilterFactory {

             // Norwegian (Nynorsk) stemmers
         } else if ("light_nynorsk".equalsIgnoreCase(language) || "lightNynorsk".equalsIgnoreCase(language)) {
-            return new NorwegianLightStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK);
+            NorwegianLightStemFilterFactory factory = new NorwegianLightStemFilterFactory(Collections.singletonMap("variant", "nn"));
+            return factory.create(tokenStream);
         } else if ("minimal_nynorsk".equalsIgnoreCase(language) || "minimalNynorsk".equalsIgnoreCase(language)) {
-            return new NorwegianMinimalStemFilter(tokenStream, NorwegianLightStemmer.NYNORSK);
+            NorwegianMinimalStemFilterFactory factory = new NorwegianMinimalStemFilterFactory(
+                Collections.singletonMap("variant", "nn")
+            );
+            return factory.create(tokenStream);
             // Persian stemmers
         } else if ("persian".equalsIgnoreCase(language)) {
             return new PersianStemFilter(tokenStream);
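The deprecated lovins and dutch_kp branches above now return an anonymous TokenFilter whose incrementToken() is always false, i.e. an empty token stream instead of stemmed output. A hedged sketch of what a consumer then observes, using the standard Lucene TokenStream consumption protocol; the helper name is illustrative:

    static int countTokens(TokenStream stream) throws IOException {
        stream.reset();                      // required before the first incrementToken()
        int count = 0;
        while (stream.incrementToken()) {    // immediately false for the no-op filter above
            count++;
        }
        stream.end();
        stream.close();
        return count;                        // 0 for the deprecated lovins/dutch_kp stemmers
    }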
@@ -278,7 +278,7 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase {
                 boolQuery().should(matchPhrasePrefixQuery("field1", "test")).should(matchPhrasePrefixQuery("field1", "bro"))
             ).highlighter(highlight().field("field1").order("score").preTags("<x>").postTags("</x>")),
             resp -> {
-                assertThat(resp.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(resp.getHits().getTotalHits().value(), equalTo(2L));
                 for (int i = 0; i < 2; i++) {
                     assertHighlight(
                         resp,
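The .value to .value() edit in this hunk repeats across the integration tests later in this diff for a single reason: in Lucene 10 TotalHits is a record, so its former public fields are now accessor methods. A before/after fragment, with the resp variable borrowed from the surrounding test:

    TotalHits totalHits = resp.getHits().getTotalHits();
    // Lucene 9 and earlier: public fields
    //   long count = totalHits.value;
    //   TotalHits.Relation rel = totalHits.relation;
    // Lucene 10: record accessors
    long count = totalHits.value();
    TotalHits.Relation rel = totalHits.relation();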
@@ -0,0 +1,78 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.index.IndexVersionUtils;

import java.io.IOException;

import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo;

/**
 * Tests Persian Analyzer factory and behavioural changes with Lucene 10
 */
public class PersianAnalyzerProviderTests extends ESTokenStreamTestCase {

    public void testPersianAnalyzerPostLucene10() throws IOException {
        IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersions.UPGRADE_TO_LUCENE_10_0_0,
            IndexVersion.current()
        );
        Settings settings = ESTestCase.indexSettings(1, 1)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version)
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
        Environment environment = new Environment(settings, null);

        PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider(
            idxSettings,
            environment,
            "my-analyzer",
            Settings.EMPTY
        );
        Analyzer analyzer = persianAnalyzerProvider.get();
        assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زياد", "خوانده" });
    }

    public void testPersianAnalyzerPreLucene10() throws IOException {
        IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersionUtils.getFirstVersion(),
            IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
        );
        Settings settings = ESTestCase.indexSettings(1, 1)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version)
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
        Environment environment = new Environment(settings, null);

        PersianAnalyzerProvider persianAnalyzerProvider = new PersianAnalyzerProvider(
            idxSettings,
            environment,
            "my-analyzer",
            Settings.EMPTY
        );
        Analyzer analyzer = persianAnalyzerProvider.get();
        assertAnalyzesTo(analyzer, "من کتاب های زیادی خوانده ام", new String[] { "كتاب", "زيادي", "خوانده" });
    }
}
@@ -0,0 +1,80 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the "Elastic License
 * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side
 * Public License v 1"; you may not use this file except in compliance with, at
 * your election, the "Elastic License 2.0", the "GNU Affero General Public
 * License v3.0 only", or the "Server Side Public License, v 1".
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
import org.elasticsearch.index.IndexVersions;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.index.IndexVersionUtils;

import java.io.IOException;

import static org.apache.lucene.tests.analysis.BaseTokenStreamTestCase.assertAnalyzesTo;

/**
 * Verifies the behavior of the Romanian analyzer.
 */
public class RomanianAnalyzerTests extends ESTokenStreamTestCase {

    public void testRomanianAnalyzerPostLucene10() throws IOException {
        IndexVersion postLucene10Version = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersions.UPGRADE_TO_LUCENE_10_0_0,
            IndexVersion.current()
        );
        Settings settings = ESTestCase.indexSettings(1, 1)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(IndexMetadata.SETTING_VERSION_CREATED, postLucene10Version)
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
        Environment environment = new Environment(settings, null);

        RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider(
            idxSettings,
            environment,
            "my-analyzer",
            Settings.EMPTY
        );
        Analyzer analyzer = romanianAnalyzerProvider.get();
        assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenț" });
        assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoștinț" });
    }

    public void testRomanianAnalyzerPreLucene10() throws IOException {
        IndexVersion preLucene10Version = IndexVersionUtils.randomVersionBetween(
            random(),
            IndexVersionUtils.getFirstVersion(),
            IndexVersionUtils.getPreviousVersion(IndexVersions.UPGRADE_TO_LUCENE_10_0_0)
        );
        Settings settings = ESTestCase.indexSettings(1, 1)
            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
            .put(IndexMetadata.SETTING_VERSION_CREATED, preLucene10Version)
            .build();
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
        Environment environment = new Environment(settings, null);

        RomanianAnalyzerProvider romanianAnalyzerProvider = new RomanianAnalyzerProvider(
            idxSettings,
            environment,
            "my-analyzer",
            Settings.EMPTY
        );
        Analyzer analyzer = romanianAnalyzerProvider.get();
        assertAnalyzesTo(analyzer, "absenţa", new String[] { "absenţ" });
        assertAnalyzesTo(analyzer, "cunoştinţă", new String[] { "cunoştinţ" });
    }
}
@@ -8,6 +8,7 @@
  */
 package org.elasticsearch.analysis.common;

+import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.Tokenizer;
 import org.apache.lucene.analysis.core.WhitespaceTokenizer;

@@ -16,6 +17,7 @@ import org.apache.lucene.analysis.snowball.SnowballFilter;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexVersion;
+import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.analysis.AnalysisTestsHelper;
 import org.elasticsearch.index.analysis.IndexAnalyzers;
 import org.elasticsearch.index.analysis.NamedAnalyzer;

@@ -103,6 +105,42 @@ public class StemmerTokenFilterFactoryTests extends ESTokenStreamTestCase {
         assertEquals("Invalid stemmer class specified: [english, light_english]", e.getMessage());
     }

+    public void testGermanAndGerman2Stemmer() throws IOException {
+        IndexVersion v = IndexVersionUtils.randomVersionBetween(random(), IndexVersions.UPGRADE_TO_LUCENE_10_0_0, IndexVersion.current());
+        Analyzer analyzer = createGermanStemmer("german", v);
+        assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" });
+
+        analyzer = createGermanStemmer("german2", v);
+        assertAnalyzesTo(analyzer, "Buecher Bücher", new String[] { "Buch", "Buch" });
+        assertWarnings(
+            "The 'german2' stemmer has been deprecated and folded into the 'german' Stemmer. "
+                + "Replace all usages of 'german2' with 'german'."
+        );
+    }
+
+    private static Analyzer createGermanStemmer(String variant, IndexVersion v) throws IOException {
+
+        Settings settings = Settings.builder()
+            .put("index.analysis.filter.my_german.type", "stemmer")
+            .put("index.analysis.filter.my_german.language", variant)
+            .put("index.analysis.analyzer.my_german.tokenizer", "whitespace")
+            .put("index.analysis.analyzer.my_german.filter", "my_german")
+            .put(SETTING_VERSION_CREATED, v)
+            .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+            .build();
+
+        ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN);
+        TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_german");
+        assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class));
+        Tokenizer tokenizer = new WhitespaceTokenizer();
+        tokenizer.setReader(new StringReader("Buecher oder Bücher"));
+        TokenStream create = tokenFilter.create(tokenizer);
+        assertThat(create, instanceOf(SnowballFilter.class));
+        IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
+        NamedAnalyzer analyzer = indexAnalyzers.get("my_german");
+        return analyzer;
+    }
+
     public void testKpDeprecation() throws IOException {
         IndexVersion v = IndexVersionUtils.randomVersion(random());
         Settings settings = Settings.builder()
@@ -901,6 +901,31 @@
   - length: { tokens: 1 }
   - match: { tokens.0.token: خورد }

+---
+"persian stemming":
+  - requires:
+      cluster_features: ["lucene_10_upgrade"]
+      reason: "test requires persian analyzer stemming capabilities that come with Lucene 10"
+
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              analyzer:
+                my_analyzer:
+                  type: persian
+
+  - do:
+      indices.analyze:
+        index: test
+        body:
+          text: كتابها
+          analyzer: my_analyzer
+  - length: { tokens: 1 }
+  - match: { tokens.0.token: كتاب }
+
 ---
 "portuguese":
   - do:

@@ -948,7 +973,7 @@
         text: absenţa
         analyzer: romanian
   - length: { tokens: 1 }
-  - match: { tokens.0.token: absenţ }
+  - match: { tokens.0.token: absenț }

   - do:
       indices.analyze:

@@ -957,7 +982,7 @@
         text: absenţa
         analyzer: my_analyzer
   - length: { tokens: 1 }
-  - match: { tokens.0.token: absenţ }
+  - match: { tokens.0.token: absenț }

 ---
 "russian":
@ -24,7 +24,6 @@ import org.apache.logging.log4j.Logger;
|
||||||
import org.apache.lucene.util.automaton.Automata;
|
import org.apache.lucene.util.automaton.Automata;
|
||||||
import org.apache.lucene.util.automaton.Automaton;
|
import org.apache.lucene.util.automaton.Automaton;
|
||||||
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
|
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
|
||||||
import org.apache.lucene.util.automaton.MinimizationOperations;
|
|
||||||
import org.apache.lucene.util.automaton.Operations;
|
import org.apache.lucene.util.automaton.Operations;
|
||||||
import org.apache.lucene.util.automaton.RegExp;
|
import org.apache.lucene.util.automaton.RegExp;
|
||||||
import org.elasticsearch.Build;
|
import org.elasticsearch.Build;
|
||||||
|
@ -440,13 +439,13 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic
|
||||||
? includeAutomaton
|
? includeAutomaton
|
||||||
: Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
|
: Operations.minus(includeAutomaton, excludeAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
|
||||||
|
|
||||||
return new CharacterRunAutomaton(MinimizationOperations.minimize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT));
|
return new CharacterRunAutomaton(Operations.determinize(finalAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT));
|
||||||
}
|
}
|
||||||
|
|
||||||
private static Automaton patternsToAutomaton(List<String> patterns) {
|
private static Automaton patternsToAutomaton(List<String> patterns) {
|
||||||
final List<Automaton> automata = patterns.stream().map(s -> {
|
final List<Automaton> automata = patterns.stream().map(s -> {
|
||||||
final String regex = s.replace(".", "\\.").replace("*", ".*");
|
final String regex = s.replace(".", "\\.").replace("*", ".*");
|
||||||
return new RegExp(regex).toAutomaton();
|
return new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton();
|
||||||
}).toList();
|
}).toList();
|
||||||
if (automata.isEmpty()) {
|
if (automata.isEmpty()) {
|
||||||
return null;
|
return null;
|
||||||
|
|
|
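Two Lucene 10 automaton changes meet in the APMTracer hunk above: minimization is no longer available to production code, so determinization replaces it (CharacterRunAutomaton only needs a deterministic automaton; minimality was an optimization), and RegExp parsing changed, with the DEPRECATED_COMPLEMENT flag keeping the old complement syntax accepted. A sketch of the new construction under those assumptions; the pattern string is illustrative:

    // Turn a "es.*"-style wildcard pattern into a matcher, as patternsToAutomaton does.
    String regex = "es.*".replace(".", "\\.").replace("*", ".*");   // -> "es\..*"
    Automaton automaton = new RegExp(regex, RegExp.ALL | RegExp.DEPRECATED_COMPLEMENT).toAutomaton();
    // CharacterRunAutomaton requires a deterministic automaton.
    CharacterRunAutomaton run = new CharacterRunAutomaton(
        Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT)
    );
    boolean matches = run.run("es.search");   // true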
@@ -1706,7 +1706,7 @@ public class DataStreamIT extends ESIntegTestCase {
         assertResponse(
             prepareSearch("metrics-foo").addFetchField(new FieldAndFormat(DEFAULT_TIMESTAMP_FIELD, "epoch_millis")).setSize(totalDocs),
             resp -> {
-                assertEquals(totalDocs, resp.getHits().getTotalHits().value);
+                assertEquals(totalDocs, resp.getHits().getTotalHits().value());
                 SearchHit[] hits = resp.getHits().getHits();
                 assertEquals(totalDocs, hits.length);

@@ -2027,7 +2027,7 @@ public class DataStreamIT extends ESIntegTestCase {

     static void verifyDocs(String dataStream, long expectedNumHits, List<String> expectedIndices) {
         assertResponse(prepareSearch(dataStream).setSize((int) expectedNumHits), resp -> {
-            assertThat(resp.getHits().getTotalHits().value, equalTo(expectedNumHits));
+            assertThat(resp.getHits().getTotalHits().value(), equalTo(expectedNumHits));
             Arrays.stream(resp.getHits().getHits()).forEach(hit -> assertTrue(expectedIndices.contains(hit.getIndex())));
         });
     }

@@ -545,7 +545,7 @@ public class TSDBIndexingIT extends ESSingleNodeTestCase {
         var searchRequest = new SearchRequest(dataStreamName);
         searchRequest.source().trackTotalHits(true);
         assertResponse(client().search(searchRequest), searchResponse -> {
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk));
+            assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numBulkRequests * numDocsPerBulk));
             String id = searchResponse.getHits().getHits()[0].getId();
             assertThat(id, notNullValue());

@@ -256,8 +256,8 @@ public class GeoIpDownloaderIT extends AbstractGeoIpIT {
             res -> {
                 try {
                     TotalHits totalHits = res.getHits().getTotalHits();
-                    assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation);
-                    assertEquals(size, totalHits.value);
+                    assertEquals(TotalHits.Relation.EQUAL_TO, totalHits.relation());
+                    assertEquals(size, totalHits.value());
                     assertEquals(size, res.getHits().getHits().length);

                     List<byte[]> data = new ArrayList<>();

@@ -81,7 +81,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
         ensureGreen("test");
         prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get();
         assertResponse(buildRequest("doc['foo'] + 1"), rsp -> {
-            assertEquals(1, rsp.getHits().getTotalHits().value);
+            assertEquals(1, rsp.getHits().getTotalHits().value());
             assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
         });
     }
@@ -91,7 +91,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
         ensureGreen("test");
         prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get();
         assertNoFailuresAndResponse(buildRequest("doc['foo'] + abs(1)"), rsp -> {
-            assertEquals(1, rsp.getHits().getTotalHits().value);
+            assertEquals(1, rsp.getHits().getTotalHits().value());
             assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
         });
     }
@@ -102,7 +102,7 @@ public class MoreExpressionIT extends ESIntegTestCase {

         prepareIndex("test").setId("1").setSource("foo", 4).setRefreshPolicy(IMMEDIATE).get();
         assertResponse(buildRequest("doc['foo'].value + 1"), rsp -> {
-            assertEquals(1, rsp.getHits().getTotalHits().value);
+            assertEquals(1, rsp.getHits().getTotalHits().value());
             assertEquals(5.0, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
         });
     }
@@ -125,7 +125,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
         assertResponse(req, rsp -> {
             assertNoFailures(rsp);
             SearchHits hits = rsp.getHits();
-            assertEquals(3, hits.getTotalHits().value);
+            assertEquals(3, hits.getTotalHits().value());
             assertEquals("1", hits.getAt(0).getId());
             assertEquals("3", hits.getAt(1).getId());
             assertEquals("2", hits.getAt(2).getId());
@@ -148,25 +148,25 @@ public class MoreExpressionIT extends ESIntegTestCase {
             prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")
         );
         assertResponse(buildRequest("doc['date0'].getSeconds() - doc['date0'].getMinutes()"), rsp -> {
-            assertEquals(2, rsp.getHits().getTotalHits().value);
+            assertEquals(2, rsp.getHits().getTotalHits().value());
             SearchHits hits = rsp.getHits();
             assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
             assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
         });
         assertResponse(buildRequest("doc['date0'].getHourOfDay() + doc['date1'].getDayOfMonth()"), rsp -> {
-            assertEquals(2, rsp.getHits().getTotalHits().value);
+            assertEquals(2, rsp.getHits().getTotalHits().value());
             SearchHits hits = rsp.getHits();
             assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
             assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
         });
         assertResponse(buildRequest("doc['date1'].getMonth() + 1"), rsp -> {
-            assertEquals(2, rsp.getHits().getTotalHits().value);
+            assertEquals(2, rsp.getHits().getTotalHits().value());
             SearchHits hits = rsp.getHits();
             assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D);
assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(9.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(10.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
});
|
});
|
||||||
assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> {
|
assertResponse(buildRequest("doc['date1'].getYear()"), rsp -> {
|
||||||
assertEquals(2, rsp.getHits().getTotalHits().value);
|
assertEquals(2, rsp.getHits().getTotalHits().value());
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
|
@ -182,25 +182,25 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")
|
prepareIndex("test").setId("2").setSource("id", 2, "date0", "2013-12-25T11:56:45Z", "date1", "1983-10-13T23:15:00Z")
|
||||||
);
|
);
|
||||||
assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> {
|
assertResponse(buildRequest("doc['date0'].date.secondOfMinute - doc['date0'].date.minuteOfHour"), rsp -> {
|
||||||
assertEquals(2, rsp.getHits().getTotalHits().value);
|
assertEquals(2, rsp.getHits().getTotalHits().value());
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(-11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
});
|
});
|
||||||
assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> {
|
assertResponse(buildRequest("doc['date0'].date.getHourOfDay() + doc['date1'].date.dayOfMonth"), rsp -> {
|
||||||
assertEquals(2, rsp.getHits().getTotalHits().value);
|
assertEquals(2, rsp.getHits().getTotalHits().value());
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(24.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
});
|
});
|
||||||
assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> {
|
assertResponse(buildRequest("doc['date1'].date.monthOfYear + 1"), rsp -> {
|
||||||
assertEquals(2, rsp.getHits().getTotalHits().value);
|
assertEquals(2, rsp.getHits().getTotalHits().value());
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(10.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(11.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
});
|
});
|
||||||
assertResponse(buildRequest("doc['date1'].date.year"), rsp -> {
|
assertResponse(buildRequest("doc['date1'].date.year"), rsp -> {
|
||||||
assertEquals(2, rsp.getHits().getTotalHits().value);
|
assertEquals(2, rsp.getHits().getTotalHits().value());
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(1985.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(1983.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
|
@ -238,7 +238,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].count() + doc['double1'].count()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(2.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -246,7 +246,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(7.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(6.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -254,7 +254,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].avg() + doc['double1'].avg()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(4.3, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(8.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(5.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -262,7 +262,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].median()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(1.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(1.25, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -270,7 +270,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].min()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(-1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -278,7 +278,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].max()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -286,7 +286,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
|
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double0'].sum()/doc['double0'].count()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(2.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(1.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -295,7 +295,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
// make sure count() works for missing
|
// make sure count() works for missing
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double2'].count()"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(1.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(0.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(0.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -304,7 +304,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
// make sure .empty works in the same way
|
// make sure .empty works in the same way
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 5.0 : 2.0"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['double2'].empty ? 5.0 : 2.0"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(2.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -342,7 +342,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
);
|
);
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['x'] + 1"), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(2, rsp.getHits().getTotalHits().value);
|
assertEquals(2, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(5.0, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(1.0, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
});
|
});
|
||||||
|
@ -378,7 +378,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)";
|
String script = "doc['x'] * a + b + ((c + doc['x']) > 5000000009 ? 1 : 0)";
|
||||||
assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> {
|
assertResponse(buildRequest(script, "a", 2, "b", 3.5, "c", 5000000000L), rsp -> {
|
||||||
SearchHits hits = rsp.getHits();
|
SearchHits hits = rsp.getHits();
|
||||||
assertEquals(3, hits.getTotalHits().value);
|
assertEquals(3, hits.getTotalHits().value());
|
||||||
assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(24.5, hits.getAt(0).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D);
|
assertEquals(9.5, hits.getAt(1).field("foo").getValue(), 0.0D);
|
||||||
assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
assertEquals(13.5, hits.getAt(2).field("foo").getValue(), 0.0D);
|
||||||
|
@ -501,7 +501,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertResponse(req, rsp -> {
|
assertResponse(req, rsp -> {
|
||||||
assertEquals(3, rsp.getHits().getTotalHits().value);
|
assertEquals(3, rsp.getHits().getTotalHits().value());
|
||||||
|
|
||||||
Stats stats = rsp.getAggregations().get("int_agg");
|
Stats stats = rsp.getAggregations().get("int_agg");
|
||||||
assertEquals(39.0, stats.getMax(), 0.0001);
|
assertEquals(39.0, stats.getMax(), 0.0001);
|
||||||
|
@ -655,22 +655,22 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
refresh();
|
refresh();
|
||||||
// access .lat
|
// access .lat
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['location'].lat"), rsp -> {
|
||||||
assertEquals(1, rsp.getHits().getTotalHits().value);
|
assertEquals(1, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
assertEquals(61.5240, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
||||||
});
|
});
|
||||||
// access .lon
|
// access .lon
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['location'].lon"), rsp -> {
|
||||||
assertEquals(1, rsp.getHits().getTotalHits().value);
|
assertEquals(1, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
assertEquals(105.3188, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
||||||
});
|
});
|
||||||
// access .empty
|
// access .empty
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['location'].empty ? 1 : 0"), rsp -> {
|
||||||
assertEquals(1, rsp.getHits().getTotalHits().value);
|
assertEquals(1, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
assertEquals(0, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
||||||
});
|
});
|
||||||
// call haversin
|
// call haversin
|
||||||
assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("haversin(38.9072, 77.0369, doc['location'].lat, doc['location'].lon)"), rsp -> {
|
||||||
assertEquals(1, rsp.getHits().getTotalHits().value);
|
assertEquals(1, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D);
|
assertEquals(3170D, rsp.getHits().getAt(0).field("foo").getValue(), 50D);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
@ -693,14 +693,14 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
);
|
);
|
||||||
// access .value
|
// access .value
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['vip'].value"), rsp -> {
|
||||||
assertEquals(3, rsp.getHits().getTotalHits().value);
|
assertEquals(3, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
||||||
assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
|
assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
|
||||||
assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
|
assertEquals(0.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
|
||||||
});
|
});
|
||||||
// access .empty
|
// access .empty
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 1 : 0"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['vip'].empty ? 1 : 0"), rsp -> {
|
||||||
assertEquals(3, rsp.getHits().getTotalHits().value);
|
assertEquals(3, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
assertEquals(0.0D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
||||||
assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
|
assertEquals(0.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
|
||||||
assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
|
assertEquals(1.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
|
||||||
|
@ -708,7 +708,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
// ternary operator
|
// ternary operator
|
||||||
// vip's have a 50% discount
|
// vip's have a 50% discount
|
||||||
assertNoFailuresAndResponse(buildRequest("doc['vip'] ? doc['price']/2 : doc['price']"), rsp -> {
|
assertNoFailuresAndResponse(buildRequest("doc['vip'] ? doc['price']/2 : doc['price']"), rsp -> {
|
||||||
assertEquals(3, rsp.getHits().getTotalHits().value);
|
assertEquals(3, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
assertEquals(0.5D, rsp.getHits().getAt(0).field("foo").getValue(), 1.0D);
|
||||||
assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
|
assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
|
||||||
assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
|
assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
|
||||||
|
@ -727,7 +727,7 @@ public class MoreExpressionIT extends ESIntegTestCase {
|
||||||
Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap());
|
Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap());
|
||||||
builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script)));
|
builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script)));
|
||||||
assertNoFailuresAndResponse(builder, rsp -> {
|
assertNoFailuresAndResponse(builder, rsp -> {
|
||||||
assertEquals(1, rsp.getHits().getTotalHits().value);
|
assertEquals(1, rsp.getHits().getTotalHits().value());
|
||||||
assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
|
assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
|
@@ -17,6 +17,8 @@ import org.apache.lucene.search.Rescorer;
 import org.apache.lucene.search.SortField;
 import org.elasticsearch.script.DoubleValuesScript;

+import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.util.function.Function;

 /**
@@ -37,12 +39,20 @@ public class ExpressionDoubleValuesScript implements DoubleValuesScript.Factory
         return new DoubleValuesScript() {
             @Override
             public double execute() {
+                try {
                     return exprScript.evaluate(new DoubleValues[0]);
+                } catch (IOException e) {
+                    throw new UncheckedIOException(e);
+                }
             }

             @Override
             public double evaluate(DoubleValues[] functionValues) {
+                try {
                     return exprScript.evaluate(functionValues);
+                } catch (IOException e) {
+                    throw new UncheckedIOException(e);
+                }
             }

             @Override

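The new try/catch blocks above exist because the expression evaluation call now declares a checked `IOException`, while the `DoubleValuesScript` methods it implements do not; the diff bridges the two by rethrowing as `UncheckedIOException`. A generic sketch of that bridging pattern (the class, interface, and method names here are hypothetical):

```java
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.function.DoubleSupplier;

final class Unchecked {
    @FunctionalInterface
    interface IODoubleSupplier {
        double get() throws IOException;
    }

    // Adapt a computation that throws a checked IOException to an interface
    // whose method declares no checked exceptions, preserving the cause.
    static DoubleSupplier adapt(IODoubleSupplier body) {
        return () -> {
            try {
                return body.get();
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        };
    }
}
```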
@@ -24,7 +24,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.script.AggregationScript;
 import org.elasticsearch.script.BucketAggregationScript;
 import org.elasticsearch.script.BucketAggregationSelectorScript;
-import org.elasticsearch.script.ClassPermission;
 import org.elasticsearch.script.DoubleValuesScript;
 import org.elasticsearch.script.FieldScript;
 import org.elasticsearch.script.FilterScript;
@@ -36,9 +35,8 @@ import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.TermsSetQueryScript;
 import org.elasticsearch.search.lookup.SearchLookup;

-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
+import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.text.ParseException;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -156,36 +154,14 @@ public class ExpressionScriptEngine implements ScriptEngine {

     @Override
     public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) {
-        // classloader created here
-        final SecurityManager sm = System.getSecurityManager();
         SpecialPermission.check();
-        Expression expr = AccessController.doPrivileged(new PrivilegedAction<Expression>() {
-            @Override
-            public Expression run() {
+        Expression expr;
         try {
-            // snapshot our context here, we check on behalf of the expression
-            AccessControlContext engineContext = AccessController.getContext();
-            ClassLoader loader = getClass().getClassLoader();
-            if (sm != null) {
-                loader = new ClassLoader(loader) {
-                    @Override
-                    protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
-                        try {
-                            engineContext.checkPermission(new ClassPermission(name));
-                        } catch (SecurityException e) {
-                            throw new ClassNotFoundException(name, e);
-                        }
-                        return super.loadClass(name, resolve);
-                    }
-                };
-            }
             // NOTE: validation is delayed to allow runtime vars, and we don't have access to per index stuff here
-            return JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS, loader);
+            expr = JavascriptCompiler.compile(scriptSource, JavascriptCompiler.DEFAULT_FUNCTIONS);
         } catch (ParseException e) {
             throw convertToScriptException("compile error", scriptSource, scriptSource, e);
         }
-            }
-        });
         if (contexts.containsKey(context) == false) {
             throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]");
         }
@@ -233,7 +209,11 @@ public class ExpressionScriptEngine implements ScriptEngine {
                         placeholder.setValue(((Number) value).doubleValue());
                     }
                 });
+                try {
                     return expr.evaluate(functionValuesArray);
+                } catch (IOException e) {
+                    throw new UncheckedIOException(e);
+                }
             }
         };
     };

@@ -138,7 +138,7 @@ public class SearchTemplateResponseTests extends AbstractXContentTestCase<Search
         SearchResponse expectedResponse = expectedInstance.getResponse();
         SearchResponse newResponse = newInstance.getResponse();

-        assertEquals(expectedResponse.getHits().getTotalHits().value, newResponse.getHits().getTotalHits().value);
+        assertEquals(expectedResponse.getHits().getTotalHits().value(), newResponse.getHits().getTotalHits().value());
         assertEquals(expectedResponse.getHits().getMaxScore(), newResponse.getHits().getMaxScore(), 0.0001);
     }
 }

@@ -74,11 +74,6 @@ public class ScriptedMetricAggContextsTests extends ScriptTestCase {
         Map<String, Object> state = new HashMap<>();

         Scorable scorer = new Scorable() {
-            @Override
-            public int docID() {
-                return 0;
-            }
-
             @Override
             public float score() {
                 return 0.5f;

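The deleted `docID()` override reflects another Lucene 10 breaking change: `Scorable` no longer exposes a doc id (the collector tracks it itself), so a stub scorer only needs `score()`. A minimal sketch (holder class name hypothetical):

```java
import java.io.IOException;
import org.apache.lucene.search.Scorable;

final class StubScorers {
    // In Lucene 10, Scorable#docID() is gone; only score() is left to implement.
    static final Scorable CONSTANT_HALF = new Scorable() {
        @Override
        public float score() throws IOException {
            return 0.5f;
        }
    };
}
```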
@@ -85,7 +85,7 @@ public class SimilarityScriptTests extends ScriptTestCase {
                 3.2f
             );
             TopDocs topDocs = searcher.search(query, 1);
-            assertEquals(1, topDocs.totalHits.value);
+            assertEquals(1, topDocs.totalHits.value());
             assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0);
         }
     }
@@ -134,7 +134,7 @@ public class SimilarityScriptTests extends ScriptTestCase {
                 3.2f
             );
             TopDocs topDocs = searcher.search(query, 1);
-            assertEquals(1, topDocs.totalHits.value);
+            assertEquals(1, topDocs.totalHits.value());
             assertEquals((float) (3.2 * 2 / 3), topDocs.scoreDocs[0].score, 0);
         }
     }

@@ -43,7 +43,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase {
         assertNoFailuresAndResponse(
             prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE)),
             searchResponse -> {
-                assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L));
                 for (SearchHit hit : searchResponse.getHits().getHits()) {
                     assertThat(hit.getScore(), equalTo(20f));
                 }
@@ -52,7 +52,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase {
         assertNoFailuresAndResponse(
             prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, HIGHER_RANKED_FEATURE).boost(100f)),
             searchResponse -> {
-                assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L));
                 for (SearchHit hit : searchResponse.getHits().getHits()) {
                     assertThat(hit.getScore(), equalTo(2000f));
                 }
@@ -67,7 +67,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase {
                     .minimumShouldMatch(1)
             ),
             searchResponse -> {
-                assertThat(searchResponse.getHits().getTotalHits().value, equalTo(3L));
+                assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(3L));
                 for (SearchHit hit : searchResponse.getHits().getHits()) {
                     if (hit.getId().equals("all")) {
                         assertThat(hit.getScore(), equalTo(50f));
@@ -83,7 +83,7 @@ public class RankFeaturesMapperIntegrationIT extends ESIntegTestCase {
         );
         assertNoFailuresAndResponse(
             prepareSearch(INDEX_NAME).setQuery(QueryBuilders.termQuery(FIELD_NAME, "missing_feature")),
-            response -> assertThat(response.getHits().getTotalHits().value, equalTo(0L))
+            response -> assertThat(response.getHits().getTotalHits().value(), equalTo(0L))
         );
     }

@@ -203,7 +203,7 @@ public class TokenCountFieldMapperIntegrationIT extends ESIntegTestCase {
     }

     private void assertSearchReturns(SearchResponse result, String... ids) {
-        assertThat(result.getHits().getTotalHits().value, equalTo((long) ids.length));
+        assertThat(result.getHits().getTotalHits().value(), equalTo((long) ids.length));
         assertThat(result.getHits().getHits().length, equalTo(ids.length));
         List<String> foundIds = new ArrayList<>();
         for (SearchHit hit : result.getHits()) {

@@ -468,8 +468,8 @@ public class SearchAsYouTypeFieldMapper extends FieldMapper {
             }
             Automaton automaton = Operations.concatenate(automata);
             AutomatonQuery query = method == null
-                ? new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false)
-                : new AutomatonQuery(new Term(name(), value + "*"), automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, false, method);
+                ? new AutomatonQuery(new Term(name(), value + "*"), automaton, false)
+                : new AutomatonQuery(new Term(name(), value + "*"), automaton, false, method);
             return new BooleanQuery.Builder().add(query, BooleanClause.Occur.SHOULD)
                 .add(new TermQuery(new Term(parentField, value)), BooleanClause.Occur.SHOULD)
                 .build();

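Dropping `Operations.DEFAULT_DETERMINIZE_WORK_LIMIT` from these constructor calls matches the commit note that regex automata are no longer determinized by default: the Lucene 10 `AutomatonQuery` constructor takes no work limit, so a caller that needs a deterministic automaton determinizes it up front. A hedged sketch (class, method, and the `field`/`value` arguments are placeholders):

```java
import org.apache.lucene.index.Term;
import org.apache.lucene.search.AutomatonQuery;
import org.apache.lucene.util.automaton.Automata;
import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.Operations;

final class AutomatonQueries {
    // Determinize explicitly (bounded by an explicit work limit), then build
    // the query with the slimmer Lucene 10 constructor.
    static AutomatonQuery exactStringQuery(String field, String value) {
        Automaton automaton = Operations.determinize(
            Automata.makeString(value),
            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
        );
        return new AutomatonQuery(new Term(field, value), automaton, false);
    }
}
```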
@@ -34,6 +34,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryVisitor;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.TwoPhaseIterator;
@@ -266,7 +267,7 @@ public final class SourceConfirmedTextQuery extends Query {

         @Override
         public Explanation explain(LeafReaderContext context, int doc) throws IOException {
-            RuntimePhraseScorer scorer = scorer(context);
+            RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0);
             if (scorer == null) {
                 return Explanation.noMatch("No matching phrase");
             }
@@ -286,15 +287,26 @@ public final class SourceConfirmedTextQuery extends Query {
         }

         @Override
-        public RuntimePhraseScorer scorer(LeafReaderContext context) throws IOException {
-            final Scorer approximationScorer = approximationWeight != null ? approximationWeight.scorer(context) : null;
-            if (approximationScorer == null) {
+        public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
+            ScorerSupplier approximationSupplier = approximationWeight != null ? approximationWeight.scorerSupplier(context) : null;
+            if (approximationSupplier == null) {
                 return null;
             }
+            return new ScorerSupplier() {
+                @Override
+                public Scorer get(long leadCost) throws IOException {
+                    final Scorer approximationScorer = approximationSupplier.get(leadCost);
                     final DocIdSetIterator approximation = approximationScorer.iterator();
                     final LeafSimScorer leafSimScorer = new LeafSimScorer(simScorer, context.reader(), field, scoreMode.needsScores());
                     final CheckedIntFunction<List<Object>, IOException> valueFetcher = valueFetcherProvider.apply(context);
-            return new RuntimePhraseScorer(this, approximation, leafSimScorer, valueFetcher, field, in);
+                    return new RuntimePhraseScorer(approximation, leafSimScorer, valueFetcher, field, in);
+                }
+
+                @Override
+                public long cost() {
+                    return approximationSupplier.cost();
+                }
+            };
         }

         @Override
@@ -310,7 +322,7 @@ public final class SourceConfirmedTextQuery extends Query {
                 Weight innerWeight = in.createWeight(searcher, ScoreMode.COMPLETE_NO_SCORES, 1);
                 return innerWeight.matches(context, doc);
             }
-            RuntimePhraseScorer scorer = scorer(context);
+            RuntimePhraseScorer scorer = (RuntimePhraseScorer) scorerSupplier(context).get(0L);
             if (scorer == null) {
                 return null;
             }
@@ -336,14 +348,12 @@ public final class SourceConfirmedTextQuery extends Query {
         private float freq;

         private RuntimePhraseScorer(
-            Weight weight,
             DocIdSetIterator approximation,
             LeafSimScorer scorer,
             CheckedIntFunction<List<Object>, IOException> valueFetcher,
             String field,
             Query query
         ) {
-            super(weight);
             this.scorer = scorer;
             this.valueFetcher = valueFetcher;
             this.field = field;

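The rewrite above tracks the Lucene 10 `Weight` contract: `scorerSupplier` is the extension point, the (possibly expensive) `Scorer` is built lazily in `get(leadCost)`, and `cost()` must be answerable before any scorer exists. A minimal sketch of that shape, delegating through an existing supplier (the wrapper class and method are hypothetical):

```java
import java.io.IOException;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;

final class ScorerSuppliers {
    // Defer scorer construction until get(leadCost); report cost() without
    // building a scorer, mirroring the pattern used in the hunk above.
    static ScorerSupplier delegating(ScorerSupplier inner) {
        return new ScorerSupplier() {
            @Override
            public Scorer get(long leadCost) throws IOException {
                return inner.get(leadCost);
            }

            @Override
            public long cost() {
                return inner.cost();
            }
        };
    }
}
```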
@@ -89,8 +89,8 @@ public class MatchOnlyTextFieldMapperTests extends MapperTestCase {
             SearchExecutionContext context = createSearchExecutionContext(mapperService, newSearcher(reader));
             MatchPhraseQueryBuilder queryBuilder = new MatchPhraseQueryBuilder("field", "brown fox");
             TopDocs docs = context.searcher().search(queryBuilder.toQuery(context), 1);
-            assertThat(docs.totalHits.value, equalTo(1L));
-            assertThat(docs.totalHits.relation, equalTo(TotalHits.Relation.EQUAL_TO));
+            assertThat(docs.totalHits.value(), equalTo(1L));
+            assertThat(docs.totalHits.relation(), equalTo(TotalHits.Relation.EQUAL_TO));
             assertThat(docs.scoreDocs[0].doc, equalTo(0));
         }
     }

@@ -61,7 +61,7 @@ public class SourceConfirmedTextQueryTests extends ESTestCase {
     private static final IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> SOURCE_FETCHER_PROVIDER =
         context -> docID -> {
             sourceFetchCount.incrementAndGet();
-            return Collections.<Object>singletonList(context.reader().document(docID).get("body"));
+            return Collections.<Object>singletonList(context.reader().storedFields().document(docID).get("body"));
         };

     public void testTerm() throws Exception {

@@ -41,7 +41,7 @@ import java.util.List;
 public class SourceIntervalsSourceTests extends ESTestCase {

     private static final IOFunction<LeafReaderContext, CheckedIntFunction<List<Object>, IOException>> SOURCE_FETCHER_PROVIDER =
-        context -> docID -> Collections.<Object>singletonList(context.reader().document(docID).get("body"));
+        context -> docID -> Collections.<Object>singletonList(context.reader().storedFields().document(docID).get("body"));

     public void testIntervals() throws IOException {
         final FieldType ft = new FieldType(TextField.TYPE_STORED);

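Both fetcher changes above follow the removal of `IndexReader#document(int)` in Lucene 10: stored fields are now read through a `StoredFields` view. A minimal sketch (the holder class and method are hypothetical; the `"body"` field name is taken from the tests above):

```java
import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.StoredFields;

final class StoredFieldAccess {
    // Lucene 10: obtain a StoredFields view, then load documents through it.
    // The view is not thread-safe, so create one per consuming thread.
    static String loadBody(LeafReader reader, int docID) throws IOException {
        StoredFields storedFields = reader.storedFields();
        Document doc = storedFields.document(docID);
        return doc.get("body");
    }
}
```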
@@ -115,7 +115,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
                     logger.info("bucket={}", bucket.getKey());
                     Children childrenBucket = bucket.getAggregations().get("to_comment");
                     TopHits topHits = childrenBucket.getAggregations().get("top_comments");
-                    logger.info("total_hits={}", topHits.getHits().getTotalHits().value);
+                    logger.info("total_hits={}", topHits.getHits().getTotalHits().value());
                     for (SearchHit searchHit : topHits.getHits()) {
                         logger.info("hit= {} {}", searchHit.getSortValues()[0], searchHit.getId());
                     }
@@ -129,7 +129,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
                 assertThat(childrenBucket.getName(), equalTo("to_comment"));
                 assertThat(childrenBucket.getDocCount(), equalTo(2L));
                 TopHits topHits = childrenBucket.getAggregations().get("top_comments");
-                assertThat(topHits.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(topHits.getHits().getTotalHits().value(), equalTo(2L));
                 assertThat(topHits.getHits().getAt(0).getId(), equalTo("e"));
                 assertThat(topHits.getHits().getAt(1).getId(), equalTo("f"));

@@ -141,7 +141,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
                 assertThat(childrenBucket.getName(), equalTo("to_comment"));
                 assertThat(childrenBucket.getDocCount(), equalTo(1L));
                 topHits = childrenBucket.getAggregations().get("top_comments");
-                assertThat(topHits.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L));
                 assertThat(topHits.getHits().getAt(0).getId(), equalTo("f"));

                 categoryBucket = categoryTerms.getBucketByKey("c");
@@ -152,7 +152,7 @@ public class ChildrenIT extends AbstractParentChildTestCase {
                 assertThat(childrenBucket.getName(), equalTo("to_comment"));
                 assertThat(childrenBucket.getDocCount(), equalTo(1L));
                 topHits = childrenBucket.getAggregations().get("top_comments");
-                assertThat(topHits.getHits().getTotalHits().value, equalTo(1L));
+                assertThat(topHits.getHits().getTotalHits().value(), equalTo(1L));
                 assertThat(topHits.getHits().getAt(0).getId(), equalTo("f"));
             }
         );

@ -107,7 +107,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -117,7 +117,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))
|
boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", "p_value1"), false))
|
||||||
),
|
),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -127,7 +127,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))
|
boolQuery().must(matchAllQuery()).filter(hasParentQuery("child", termQuery("c_field", "c_value1"), false))
|
||||||
),
|
),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("gc1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("gc1"));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -135,7 +135,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
assertNoFailuresAndResponse(
|
assertNoFailuresAndResponse(
|
||||||
prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)),
|
prepareSearch("test").setQuery(hasParentQuery("parent", termQuery("p_field", "p_value1"), false)),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -143,7 +143,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
assertNoFailuresAndResponse(
|
assertNoFailuresAndResponse(
|
||||||
prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)),
|
prepareSearch("test").setQuery(hasParentQuery("child", termQuery("c_field", "c_value1"), false)),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("gc1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("gc1"));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -161,7 +161,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
assertNoFailuresAndResponse(
|
assertNoFailuresAndResponse(
|
||||||
prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)),
|
prepareSearch("test").setQuery(hasChildQuery("test", matchQuery("foo", 1), ScoreMode.None)),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
@ -182,7 +182,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
|
|
||||||
// TEST FETCHING _parent from child
|
// TEST FETCHING _parent from child
|
||||||
assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> {
|
assertNoFailuresAndResponse(prepareSearch("test").setQuery(idsQuery().addIds("c1")), response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));
|
assertThat(response.getHits().getAt(0).getId(), equalTo("c1"));
|
||||||
assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child"));
|
assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child"));
|
||||||
assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1"));
|
assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1"));
|
||||||
|
@ -195,7 +195,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
|
||||||
boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
|
boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child"))
|
||||||
),
|
),
|
||||||
response -> {
|
response -> {
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(2L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
|
||||||
assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2")));
|
assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2")));
|
||||||
assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child"));
|
assertThat(extractValue("join_field.name", response.getHits().getAt(0).getSourceAsMap()), equalTo("child"));
|
||||||
assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1"));
|
assertThat(extractValue("join_field.parent", response.getHits().getAt(0).getSourceAsMap()), equalTo("p1"));
|
||||||
|
@@ -208,7 +208,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 // HAS CHILD
 assertNoFailuresAndResponse(prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow")), response -> {
 assertHitCount(response, 1L);
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 });
 
@@ -307,8 +307,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 ).setSize(numChildDocsPerParent),
 response -> {
 Set<String> childIds = parentToChildrenEntry.getValue();
-assertThat(response.getHits().getTotalHits().value, equalTo((long) childIds.size()));
-for (int i = 0; i < response.getHits().getTotalHits().value; i++) {
+assertThat(response.getHits().getTotalHits().value(), equalTo((long) childIds.size()));
+for (int i = 0; i < response.getHits().getTotalHits().value(); i++) {
 assertThat(childIds.remove(response.getHits().getAt(i).getId()), is(true));
 assertThat(response.getHits().getAt(i).getScore(), is(1.0f));
 }
@@ -341,7 +341,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None)),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 }
 );
@@ -349,7 +349,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None)),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p2"));
 }
 );
@@ -357,7 +357,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None)),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 }
@@ -367,7 +367,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 }
 );
@@ -375,7 +375,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "blue"), ScoreMode.None))),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p2"));
 }
 );
@@ -383,7 +383,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "red"), ScoreMode.None))),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 }
@@ -426,7 +426,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 )
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getAt(0).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 assertThat(response.getHits().getAt(1).getId(), anyOf(equalTo("p2"), equalTo("p1")));
 
@@ -458,7 +458,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
 }
@@ -472,7 +472,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(constantScoreQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.None))),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1_updated\""));
 }
@@ -647,7 +647,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 )
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -667,7 +667,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 )
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
@@ -687,7 +687,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 )
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(4f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
@@ -707,7 +707,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 )
 ).addSort(SortBuilders.fieldSort("c_field3")).addSort(SortBuilders.scoreSort()),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(7L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("16"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(5f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("17"));
@@ -768,7 +768,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 boolQuery().must(matchAllQuery()).filter(hasChildQuery("child", termQuery("c_field", 1), ScoreMode.None))
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
 }
 );
@@ -778,7 +778,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 boolQuery().must(matchAllQuery()).filter(hasParentQuery("parent", termQuery("p_field", 1), false))
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
 }
 );
@@ -801,7 +801,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 )
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("1"));
 SearchHit[] searchHits = response.getHits().getHits()[0].getInnerHits().get("child").getHits();
 assertThat(searchHits.length, equalTo(1));
@@ -888,7 +888,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 .addSort("p_field", SortOrder.ASC)
 .setSize(5),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(10L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(10L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("p000"));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("p001"));
 assertThat(response.getHits().getHits()[2].getId(), equalTo("p002"));
@@ -903,7 +903,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 .addSort("c_field", SortOrder.ASC)
 .setSize(5),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(500L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(500L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("c000"));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("c001"));
 assertThat(response.getHits().getHits()[2].getId(), equalTo("c002"));
@@ -932,7 +932,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
 }
@@ -943,7 +943,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("c3"));
 assertThat(response.getHits().getAt(1).getId(), equalTo("c4"));
 }
@@ -961,7 +961,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(hasChildQuery("child", termQuery("c_field", "yellow"), ScoreMode.Total)),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p1"));
 assertThat(response.getHits().getAt(0).getSourceAsString(), containsString("\"p_value1\""));
 }
@@ -972,7 +972,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 boolQuery().must(matchQuery("c_field", "x")).must(hasParentQuery("parent", termQuery("p_field", "p_value2"), true))
 ),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getAt(0).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
 assertThat(response.getHits().getAt(1).getId(), Matchers.anyOf(equalTo("c3"), equalTo("c4")));
 }
@@ -996,7 +996,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertNoFailuresAndResponse(
 prepareSearch("test").setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.Total)).setMinScore(3),
 response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getId(), equalTo("p2"));
 assertThat(response.getHits().getAt(0).getScore(), equalTo(3.0f));
 }
@@ -1411,7 +1411,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 10,
 (respNum, response) -> {
 assertNoFailures(response);
-assertThat(response.getHits().getTotalHits().value, equalTo(10L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(10L));
 }
 );
 }
@@ -1469,7 +1469,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
 // Score mode = NONE
 assertResponse(minMaxQuery(ScoreMode.None, 1, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1479,7 +1479,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.None, 2, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("4"));
@@ -1487,7 +1487,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.None, 3, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 });
@@ -1495,7 +1495,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertHitCount(minMaxQuery(ScoreMode.None, 4, null), 0L);
 
 assertResponse(minMaxQuery(ScoreMode.None, 1, 4), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1505,7 +1505,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.None, 1, 3), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1515,7 +1515,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.None, 1, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("2"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1523,7 +1523,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.None, 2, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1f));
 });
@@ -1533,7 +1533,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
 // Score mode = SUM
 assertResponse(minMaxQuery(ScoreMode.Total, 1, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1543,7 +1543,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Total, 2, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1551,7 +1551,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Total, 3, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
 });
@@ -1559,7 +1559,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertHitCount(minMaxQuery(ScoreMode.Total, 4, null), 0L);
 
 assertResponse(minMaxQuery(ScoreMode.Total, 1, 4), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1569,7 +1569,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Total, 1, 3), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(6f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1579,7 +1579,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Total, 1, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
@@ -1587,7 +1587,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Total, 2, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 });
@@ -1597,7 +1597,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
 // Score mode = MAX
 assertResponse(minMaxQuery(ScoreMode.Max, 1, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1607,7 +1607,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Max, 2, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1615,7 +1615,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Max, 3, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 });
@@ -1623,7 +1623,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertHitCount(minMaxQuery(ScoreMode.Max, 4, null), 0L);
 
 assertResponse(minMaxQuery(ScoreMode.Max, 1, 4), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1633,7 +1633,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Max, 1, 3), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(3f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1643,7 +1643,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Max, 1, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
@@ -1651,7 +1651,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Max, 2, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 });
@@ -1661,7 +1661,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 
 // Score mode = AVG
 assertResponse(minMaxQuery(ScoreMode.Avg, 1, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1671,7 +1671,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Avg, 2, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1679,7 +1679,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Avg, 3, null), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 });
@@ -1687,7 +1687,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 assertHitCount(minMaxQuery(ScoreMode.Avg, 4, null), 0L);
 
 assertResponse(minMaxQuery(ScoreMode.Avg, 1, 4), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1697,7 +1697,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Avg, 1, 3), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(3L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(3L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("4"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(2f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("3"));
@@ -1707,7 +1707,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Avg, 1, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f));
 assertThat(response.getHits().getHits()[1].getId(), equalTo("2"));
@@ -1715,7 +1715,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
 });
 
 assertResponse(minMaxQuery(ScoreMode.Avg, 2, 2), response -> {
-assertThat(response.getHits().getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getHits()[0].getId(), equalTo("3"));
 assertThat(response.getHits().getHits()[0].getScore(), equalTo(1.5f));
 });

@@ -128,7 +128,7 @@ public class InnerHitsIT extends ParentChildTestCase {
 
 assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
 SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-assertThat(innerHits.getTotalHits().value, equalTo(2L));
+assertThat(innerHits.getTotalHits().value(), equalTo(2L));
 
 assertThat(innerHits.getAt(0).getId(), equalTo("c1"));
 assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
@@ -148,7 +148,7 @@ public class InnerHitsIT extends ParentChildTestCase {
 
 assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
 SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-assertThat(innerHits.getTotalHits().value, equalTo(3L));
+assertThat(innerHits.getTotalHits().value(), equalTo(3L));
 
 assertThat(innerHits.getAt(0).getId(), equalTo("c4"));
 assertThat(innerHits.getAt(1).getId(), equalTo("c5"));
@@ -280,7 +280,7 @@ public class InnerHitsIT extends ParentChildTestCase {
 assertThat(searchHit.getShard(), notNullValue());
 
 SearchHits inner = searchHit.getInnerHits().get("a");
-assertThat(inner.getTotalHits().value, equalTo((long) child1InnerObjects[parent]));
+assertThat(inner.getTotalHits().value(), equalTo((long) child1InnerObjects[parent]));
 for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
 SearchHit innerHit = inner.getAt(child);
 String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child);
@@ -290,7 +290,7 @@ public class InnerHitsIT extends ParentChildTestCase {
 offset1 += child1InnerObjects[parent];
 
 inner = searchHit.getInnerHits().get("b");
-assertThat(inner.getTotalHits().value, equalTo((long) child2InnerObjects[parent]));
+assertThat(inner.getTotalHits().value(), equalTo((long) child2InnerObjects[parent]));
 for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
 SearchHit innerHit = inner.getAt(child);
 String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child);
@@ -347,12 +347,12 @@ public class InnerHitsIT extends ParentChildTestCase {
 
 SearchHit searchHit = response.getHits().getAt(0);
 assertThat(searchHit.getId(), equalTo("3"));
-assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L));
+assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L));
 assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1"));
 
 searchHit = response.getHits().getAt(1);
 assertThat(searchHit.getId(), equalTo("4"));
-assertThat(searchHit.getInnerHits().get("question").getTotalHits().value, equalTo(1L));
+assertThat(searchHit.getInnerHits().get("question").getTotalHits().value(), equalTo(1L));
 assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2"));
 }
 );
@@ -394,11 +394,11 @@ public class InnerHitsIT extends ParentChildTestCase {
 
 assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
 SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-assertThat(innerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerHits.getAt(0).getId(), equalTo("3"));
 
 innerHits = innerHits.getAt(0).getInnerHits().get("remark");
-assertThat(innerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerHits.getAt(0).getId(), equalTo("5"));
 }
 );
@@ -417,11 +417,11 @@ public class InnerHitsIT extends ParentChildTestCase {
 
 assertThat(response.getHits().getAt(0).getInnerHits().size(), equalTo(1));
 SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
-assertThat(innerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerHits.getAt(0).getId(), equalTo("4"));
 
 innerHits = innerHits.getAt(0).getInnerHits().get("remark");
-assertThat(innerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerHits.getAt(0).getId(), equalTo("6"));
 }
 );
@@ -482,34 +482,34 @@ public class InnerHitsIT extends ParentChildTestCase {
 assertThat(response.getHits().getAt(0).getId(), equalTo("duke"));
 
 SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("earls");
-assertThat(innerHits.getTotalHits().value, equalTo(4L));
+assertThat(innerHits.getTotalHits().value(), equalTo(4L));
 assertThat(innerHits.getAt(0).getId(), equalTo("earl1"));
 assertThat(innerHits.getAt(1).getId(), equalTo("earl2"));
 assertThat(innerHits.getAt(2).getId(), equalTo("earl3"));
 assertThat(innerHits.getAt(3).getId(), equalTo("earl4"));
 
 SearchHits innerInnerHits = innerHits.getAt(0).getInnerHits().get("barons");
-assertThat(innerInnerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron1"));
 
 innerInnerHits = innerHits.getAt(1).getInnerHits().get("barons");
-assertThat(innerInnerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron2"));
 
 innerInnerHits = innerHits.getAt(2).getInnerHits().get("barons");
-assertThat(innerInnerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron3"));
 
 innerInnerHits = innerHits.getAt(3).getInnerHits().get("barons");
-assertThat(innerInnerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerInnerHits.getAt(0).getId(), equalTo("baron4"));
 
 innerHits = response.getHits().getAt(0).getInnerHits().get("princes");
-assertThat(innerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerHits.getAt(0).getId(), equalTo("prince"));
 
 innerInnerHits = innerHits.getAt(0).getInnerHits().get("kings");
-assertThat(innerInnerHits.getTotalHits().value, equalTo(1L));
+assertThat(innerInnerHits.getTotalHits().value(), equalTo(1L));
 assertThat(innerInnerHits.getAt(0).getId(), equalTo("king"));
 }
 );
@@ -532,12 +532,12 @@ public class InnerHitsIT extends ParentChildTestCase {
 response -> {
 assertHitCount(response, 2);
 assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
-assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
 assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
 
 assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
-assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getAt(1).getInnerHits().get("child").getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
 assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));
 }
@@ -549,7 +549,7 @@ public class InnerHitsIT extends ParentChildTestCase {
 assertResponse(prepareSearch("index").setQuery(query).addSort("id", SortOrder.ASC), response -> {
 assertHitCount(response, 1);
 assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
-assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value, equalTo(1L));
+assertThat(response.getHits().getAt(0).getInnerHits().get("child").getTotalHits().value(), equalTo(1L));
 assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
 assertThat(response.getHits().getAt(0).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name2"));
 });

@@ -102,7 +102,7 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements
 public void collect(int docId, long owningBucketOrd) throws IOException {
 if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) {
 int globalOrdinal = (int) globalOrdinals.nextOrd();
-assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS;
+assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1;
 collectionStrategy.add(owningBucketOrd, globalOrdinal);
 }
 }
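
Note: Lucene 10 removed SortedSetDocValues.NO_MORE_ORDS, so single-valuedness can no
longer be asserted by probing nextOrd() for a sentinel; after advanceExact(doc) a caller
must read exactly docValueCount() ordinals, which is what the rewritten assert checks
(here and in the -150 hunk below). A minimal sketch of the new iteration contract; the
helper class is illustrative, not part of this PR:

import java.io.IOException;

import org.apache.lucene.index.SortedSetDocValues;

class OrdinalReader {
    /** Collects all ordinals of one document under the Lucene 10 contract. */
    static long[] ordinals(SortedSetDocValues dv, int doc) throws IOException {
        if (dv.advanceExact(doc) == false) {
            return new long[0]; // document has no values
        }
        long[] ords = new long[dv.docValueCount()];
        for (int i = 0; i < ords.length; i++) {
            ords[i] = dv.nextOrd(); // call exactly docValueCount() times; no sentinel
        }
        return ords;
    }
}
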
@@ -134,11 +134,6 @@ public abstract class ParentJoinAggregator extends BucketsAggregator implements
 public float score() {
 return 1f;
 }
-
-@Override
-public int docID() {
-return childDocsIter.docID();
-}
 });
 
 final Bits liveDocs = ctx.reader().getLiveDocs();
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
int globalOrdinal = (int) globalOrdinals.nextOrd();
|
int globalOrdinal = (int) globalOrdinals.nextOrd();
|
||||||
assert globalOrdinal != -1 && globalOrdinals.nextOrd() == SortedSetDocValues.NO_MORE_ORDS;
|
assert globalOrdinal != -1 && globalOrdinals.docValueCount() == 1;
|
||||||
/*
|
/*
|
||||||
* Check if we contain every ordinal. It's almost certainly be
|
* Check if we contain every ordinal. It's almost certainly be
|
||||||
* faster to replay all the matching ordinals and filter them down
|
* faster to replay all the matching ordinals and filter them down
|
||||||
|
|
|
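For context on the assertion change: Lucene 10 removes the SortedSetDocValues.NO_MORE_ORDS sentinel, and consumers now call nextOrd() exactly docValueCount() times per document instead of looping until a sentinel value. A sketch of the new iteration contract (illustrative helper):

import java.io.IOException;
import org.apache.lucene.index.SortedSetDocValues;

// Lucene 10: no NO_MORE_ORDS; read exactly docValueCount() ordinals per doc.
static void readOrds(SortedSetDocValues ordinals, int docId) throws IOException {
    if (ordinals.advanceExact(docId)) {
        for (int i = 0; i < ordinals.docValueCount(); i++) {
            long ord = ordinals.nextOrd(); // always a valid ordinal here, never -1
            // ... consume ord
        }
    }
}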
@@ -20,8 +20,8 @@ import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopDocsCollector;
-import org.apache.lucene.search.TopFieldCollector;
-import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.TopFieldCollectorManager;
+import org.apache.lucene.search.TopScoreDocCollectorManager;
 import org.apache.lucene.search.TotalHitCountCollector;
 import org.apache.lucene.search.TotalHits;
 import org.apache.lucene.search.Weight;
@@ -137,12 +137,12 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
         TopDocsCollector<?> topDocsCollector;
         MaxScoreCollector maxScoreCollector = null;
         if (sort() != null) {
-            topDocsCollector = TopFieldCollector.create(sort().sort, topN, Integer.MAX_VALUE);
+            topDocsCollector = new TopFieldCollectorManager(sort().sort, topN, null, Integer.MAX_VALUE, false).newCollector();
             if (trackScores()) {
                 maxScoreCollector = new MaxScoreCollector();
             }
         } else {
-            topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE);
+            topDocsCollector = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector();
             maxScoreCollector = new MaxScoreCollector();
         }
         for (LeafReaderContext ctx : this.context.searcher().getIndexReader().leaves()) {
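These collector changes follow Lucene's move from static create() factories to CollectorManager implementations, the same mechanism that enables concurrent collection. For single-threaded use the manager simply hands out one collector; the trailing boolean mirrors the diff above and opts out of concurrency support. A hedged sketch (variable names are assumptions):

import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollectorManager;
import org.apache.lucene.search.TopScoreDocCollectorManager;

// One collector per manager when searching a single slice.
TopDocsCollector<?> sorted = new TopFieldCollectorManager(sort, topN, null, Integer.MAX_VALUE, false).newCollector();
TopDocsCollector<?> scored = new TopScoreDocCollectorManager(topN, null, Integer.MAX_VALUE, false).newCollector();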
@@ -107,7 +107,7 @@ public class ChildrenToParentAggregatorTests extends AggregatorTestCase {

         // verify for each children
         for (String parent : expectedParentChildRelations.keySet()) {
-            testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId("child0_" + parent)), indexReader, aggregation -> {
+            testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId("child0_" + parent))), indexReader, aggregation -> {
                 assertEquals(
                     "Expected one result for min-aggregation for parent: " + parent + ", but had aggregation-results: " + aggregation,
                     1,
@@ -104,7 +104,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
         });

         for (String parent : expectedParentChildRelations.keySet()) {
-            testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexReader, child -> {
+            testCase(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(parent))), indexReader, child -> {
                 assertEquals((long) expectedParentChildRelations.get(parent).v1(), child.getDocCount());
                 assertEquals(
                     expectedParentChildRelations.get(parent).v2(),
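These test updates reflect the TermInSetQuery constructor change: the BytesRef varargs form is gone, and Lucene 10 takes a Collection<BytesRef>. For example (values are illustrative):

import java.util.List;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.util.BytesRef;

// Lucene 10 constructor: (String field, Collection<BytesRef> terms).
TermInSetQuery ids = new TermInSetQuery("_id", List.of(new BytesRef("1"), new BytesRef("2")));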
@@ -54,6 +54,7 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;

 import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
@@ -341,13 +342,13 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
         BooleanQuery booleanQuery = (BooleanQuery) lateParsingQuery.getInnerQuery();
         assertThat(booleanQuery.clauses().size(), equalTo(2));
         // check the inner ids query, we have to call rewrite to get to check the type it's executed against
-        assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST));
-        assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));
-        TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery();
-        assertEquals(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(id)), termsQuery);
+        assertThat(booleanQuery.clauses().get(0).occur(), equalTo(BooleanClause.Occur.MUST));
+        assertThat(booleanQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class));
+        TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).query();
+        assertEquals(new TermInSetQuery(IdFieldMapper.NAME, List.of(Uid.encodeId(id))), termsQuery);
         // check the type filter
-        assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER));
-        assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).getQuery());
+        assertThat(booleanQuery.clauses().get(1).occur(), equalTo(BooleanClause.Occur.FILTER));
+        assertEquals(new TermQuery(new Term("join_field", type)), booleanQuery.clauses().get(1).query());
     }

     @Override
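BooleanClause is one of the Lucene classes that became a record in this upgrade, so the getOccur()/getQuery() getters turn into the record accessors occur()/query(). A small sketch of the adjusted access pattern (the query variable is assumed to exist):

import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;

for (BooleanClause clause : booleanQuery.clauses()) {
    if (clause.occur() == BooleanClause.Occur.FILTER) {
        // clause.query() replaces clause.getQuery()
        System.out.println(clause.query());
    }
}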
@@ -110,15 +110,26 @@ final class PercolateQuery extends Query implements Accountable {
             }

             @Override
-            public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException {
-                final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext);
-                if (approximation == null) {
+            public ScorerSupplier scorerSupplier(LeafReaderContext leafReaderContext) throws IOException {
+                final ScorerSupplier approximationSupplier = candidateMatchesWeight.scorerSupplier(leafReaderContext);
+                if (approximationSupplier == null) {
                     return null;
                 }

+                ScorerSupplier verifiedDocsScorer;
+                if (scoreMode.needsScores()) {
+                    verifiedDocsScorer = null;
+                } else {
+                    verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext);
+                }
+
+                return new ScorerSupplier() {
+                    @Override
+                    public Scorer get(long leadCost) throws IOException {
+                        final Scorer approximation = approximationSupplier.get(leadCost);
                 final CheckedFunction<Integer, Query, IOException> percolatorQueries = queryStore.getQueries(leafReaderContext);
                 if (scoreMode.needsScores()) {
-                    return new BaseScorer(this, approximation) {
+                    return new BaseScorer(approximation) {

                         float score;

@@ -149,9 +160,8 @@ final class PercolateQuery extends Query implements Accountable {
                     }
                 };
             } else {
-                ScorerSupplier verifiedDocsScorer = verifiedMatchesWeight.scorerSupplier(leafReaderContext);
                 Bits verifiedDocsBits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), verifiedDocsScorer);
-                return new BaseScorer(this, approximation) {
+                return new BaseScorer(approximation) {

                     @Override
                     public float score() throws IOException {
@@ -172,7 +182,9 @@ final class PercolateQuery extends Query implements Accountable {
                         return false;
                     }
                     if (nonNestedDocsFilter != null) {
-                        query = new BooleanQuery.Builder().add(query, Occur.MUST).add(nonNestedDocsFilter, Occur.FILTER).build();
+                        query = new BooleanQuery.Builder().add(query, Occur.MUST)
+                            .add(nonNestedDocsFilter, Occur.FILTER)
+                            .build();
                     }
                     return Lucene.exists(percolatorIndexSearcher, query);
                 }
@@ -180,6 +192,13 @@ final class PercolateQuery extends Query implements Accountable {
                     }
                 }
+
+                    @Override
+                    public long cost() {
+                        return approximationSupplier.cost();
+                    }
+                };
+            }

         @Override
         public boolean isCacheable(LeafReaderContext ctx) {
             // This query uses a significant amount of memory, let's never
@@ -265,8 +284,7 @@ final class PercolateQuery extends Query implements Accountable {

         final Scorer approximation;

-        BaseScorer(Weight weight, Scorer approximation) {
-            super(weight);
+        BaseScorer(Scorer approximation) {
             this.approximation = approximation;
         }

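This is the Scorer / ScorerSupplier breaking change called out in the commit message: custom Weight implementations now provide a ScorerSupplier, Scorer construction is deferred to get(leadCost), and Scorer no longer takes the Weight as a constructor argument. A minimal sketch of the pattern, assuming a hypothetical wrapped innerWeight:

import java.io.IOException;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;

@Override
public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
    ScorerSupplier inner = innerWeight.scorerSupplier(context); // hypothetical delegate
    if (inner == null) {
        return null; // no matches in this segment
    }
    return new ScorerSupplier() {
        @Override
        public Scorer get(long leadCost) throws IOException {
            return inner.get(leadCost); // build the Scorer lazily
        }

        @Override
        public long cost() {
            return inner.cost();
        }
    };
}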
@@ -294,7 +294,7 @@ public class PercolatorFieldMapper extends FieldMapper {
         List<BytesRef> extractedTerms = t.v1();
         Map<String, List<byte[]>> encodedPointValuesByField = t.v2();
         // `1 + ` is needed to take into account the EXTRACTION_FAILED should clause
-        boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery
+        boolean canUseMinimumShouldMatchField = 1 + extractedTerms.size() + encodedPointValuesByField.size() <= IndexSearcher
             .getMaxClauseCount();

         List<Query> subQueries = new ArrayList<>();
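The getMaxClauseCount() owner changes because the global clause limit now lives on IndexSearcher rather than BooleanQuery. Usage stays symmetrical, as in the tests later in this diff:

import org.apache.lucene.search.IndexSearcher;

int previous = IndexSearcher.getMaxClauseCount();
IndexSearcher.setMaxClauseCount(1024);
try {
    // build and run the clause-heavy query
} finally {
    IndexSearcher.setMaxClauseCount(previous); // always restore the global limit
}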
@@ -91,7 +91,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
             query = percolatorIndexSearcher.rewrite(query);
             int memoryIndexMaxDoc = percolatorIndexSearcher.getIndexReader().maxDoc();
             TopDocs topDocs = percolatorIndexSearcher.search(query, memoryIndexMaxDoc, new Sort(SortField.FIELD_DOC));
-            if (topDocs.totalHits.value == 0) {
+            if (topDocs.totalHits.value() == 0) {
                 // This hit didn't match with a percolate query,
                 // likely to happen when percolating multiple documents
                 continue;
@@ -8,7 +8,6 @@
  */
 package org.elasticsearch.percolator;

-import org.apache.lucene.index.PrefixCodedTerms;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.spans.SpanOrQuery;
 import org.apache.lucene.queries.spans.SpanTermQuery;
@@ -26,12 +25,15 @@ import org.apache.lucene.search.SynonymQuery;
 import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.BytesRefIterator;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.automaton.ByteRunAutomaton;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.index.query.DateRangeIncludingNowQuery;
 import org.elasticsearch.lucene.queries.BlendedTermQuery;

+import java.io.IOException;
+import java.io.UncheckedIOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -162,7 +164,7 @@ final class QueryAnalyzer {
         int minimumShouldMatchValue = 0;
         if (parent instanceof BooleanQuery bq) {
             if (bq.getMinimumNumberShouldMatch() == 0
-                && bq.clauses().stream().anyMatch(c -> c.getOccur() == Occur.MUST || c.getOccur() == Occur.FILTER)) {
+                && bq.clauses().stream().anyMatch(c -> c.occur() == Occur.MUST || c.occur() == Occur.FILTER)) {
                 return QueryVisitor.EMPTY_VISITOR;
             }
             minimumShouldMatchValue = bq.getMinimumNumberShouldMatch();
@@ -198,12 +200,16 @@ final class QueryAnalyzer {
         @Override
         public void consumeTermsMatching(Query query, String field, Supplier<ByteRunAutomaton> automaton) {
             if (query instanceof TermInSetQuery q) {
-                PrefixCodedTerms.TermIterator ti = q.getTermData().iterator();
+                BytesRefIterator bytesRefIterator = q.getBytesRefIterator();
                 BytesRef term;
                 Set<QueryExtraction> qe = new HashSet<>();
-                while ((term = ti.next()) != null) {
+                try {
+                    while ((term = bytesRefIterator.next()) != null) {
                         qe.add(new QueryExtraction(new Term(field, term)));
                     }
+                } catch (IOException e) {
+                    throw new UncheckedIOException(e);
+                }
                 this.terms.add(new Result(true, qe, 1));
             } else {
                 super.consumeTermsMatching(query, field, automaton);
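PrefixCodedTerms is no longer reachable through TermInSetQuery; the query now hands out a BytesRefIterator whose next() declares IOException, hence the try/catch wrapping above. A standalone sketch of the same traversal (helper name illustrative):

import java.io.IOException;
import java.io.UncheckedIOException;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;

static int countTerms(TermInSetQuery query) {
    try {
        BytesRefIterator it = query.getBytesRefIterator();
        int n = 0;
        for (BytesRef term = it.next(); term != null; term = it.next()) {
            n++;
        }
        return n;
    } catch (IOException e) {
        throw new UncheckedIOException(e); // the terms live in memory; IO failure is unexpected
    }
}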
@@ -31,6 +31,7 @@ import org.apache.lucene.index.MultiTerms;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.NumericDocValues;
 import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.StoredFields;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.index.memory.MemoryIndex;
@@ -56,6 +57,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryVisitor;
 import org.apache.lucene.search.ScoreMode;
 import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.ScorerSupplier;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.TermInSetQuery;
@@ -246,15 +248,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         queryFunctions.add(
             () -> new TermInSetQuery(
                 field1,
-                new BytesRef(randomFrom(stringContent.get(field1))),
-                new BytesRef(randomFrom(stringContent.get(field1)))
+                List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1))))
             )
         );
         queryFunctions.add(
             () -> new TermInSetQuery(
                 field2,
-                new BytesRef(randomFrom(stringContent.get(field1))),
-                new BytesRef(randomFrom(stringContent.get(field1)))
+                List.of(new BytesRef(randomFrom(stringContent.get(field1))), new BytesRef(randomFrom(stringContent.get(field1))))
             )
         );
         // many iterations with boolean queries, which are the most complex queries to deal with when nested
@@ -647,7 +647,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
             v
         );
         TopDocs topDocs = shardSearcher.search(query, 1);
-        assertEquals(1L, topDocs.totalHits.value);
+        assertEquals(1L, topDocs.totalHits.value());
         assertEquals(1, topDocs.scoreDocs.length);
         assertEquals(0, topDocs.scoreDocs[0].doc);

@@ -655,7 +655,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         percolateSearcher = memoryIndex.createSearcher();
         query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
         topDocs = shardSearcher.search(query, 1);
-        assertEquals(1L, topDocs.totalHits.value);
+        assertEquals(1L, topDocs.totalHits.value());
         assertEquals(1, topDocs.scoreDocs.length);
         assertEquals(1, topDocs.scoreDocs[0].doc);

@@ -663,7 +663,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         percolateSearcher = memoryIndex.createSearcher();
         query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
         topDocs = shardSearcher.search(query, 1);
-        assertEquals(1L, topDocs.totalHits.value);
+        assertEquals(1L, topDocs.totalHits.value());
         assertEquals(1, topDocs.scoreDocs.length);
         assertEquals(2, topDocs.scoreDocs[0].doc);

@@ -671,7 +671,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         percolateSearcher = memoryIndex.createSearcher();
         query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
         topDocs = shardSearcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits.value);
+        assertEquals(1, topDocs.totalHits.value());
         assertEquals(1, topDocs.scoreDocs.length);
         assertEquals(3, topDocs.scoreDocs[0].doc);

@@ -679,7 +679,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         percolateSearcher = memoryIndex.createSearcher();
         query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
         topDocs = shardSearcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits.value);
+        assertEquals(1, topDocs.totalHits.value());
         assertEquals(1, topDocs.scoreDocs.length);
         assertEquals(4, topDocs.scoreDocs[0].doc);

@@ -690,7 +690,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         percolateSearcher = memoryIndex.createSearcher();
         query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, false, v);
         topDocs = shardSearcher.search(query, 1);
-        assertEquals(1, topDocs.totalHits.value);
+        assertEquals(1, topDocs.totalHits.value());
         assertEquals(1, topDocs.scoreDocs.length);
         assertEquals(5, topDocs.scoreDocs[0].doc);
     }
@@ -836,14 +836,14 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
             IndexVersion.current()
         );
         TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
-        assertEquals(3L, topDocs.totalHits.value);
+        assertEquals(3L, topDocs.totalHits.value());
         assertEquals(3, topDocs.scoreDocs.length);
         assertEquals(0, topDocs.scoreDocs[0].doc);
         assertEquals(1, topDocs.scoreDocs[1].doc);
         assertEquals(4, topDocs.scoreDocs[2].doc);

         topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
-        assertEquals(3L, topDocs.totalHits.value);
+        assertEquals(3L, topDocs.totalHits.value());
         assertEquals(3, topDocs.scoreDocs.length);
         assertEquals(0, topDocs.scoreDocs[0].doc);
         assertEquals(1, topDocs.scoreDocs[1].doc);
@@ -875,7 +875,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
             IndexVersion.current()
         );
         TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
-        assertEquals(2L, topDocs.totalHits.value);
+        assertEquals(2L, topDocs.totalHits.value());
         assertEquals(2, topDocs.scoreDocs.length);
         assertEquals(0, topDocs.scoreDocs[0].doc);
         assertEquals(2, topDocs.scoreDocs[1].doc);
@@ -931,15 +931,15 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
             v
         );
         BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
-        assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(CoveringQuery.class));
+        assertThat(candidateQuery.clauses().get(0).query(), instanceOf(CoveringQuery.class));
         TopDocs topDocs = shardSearcher.search(query, 10);
-        assertEquals(2L, topDocs.totalHits.value);
+        assertEquals(2L, topDocs.totalHits.value());
         assertEquals(2, topDocs.scoreDocs.length);
         assertEquals(0, topDocs.scoreDocs[0].doc);
         assertEquals(2, topDocs.scoreDocs[1].doc);

         topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
-        assertEquals(2L, topDocs.totalHits.value);
+        assertEquals(2L, topDocs.totalHits.value());
         assertEquals(2, topDocs.scoreDocs.length);
         assertEquals(0, topDocs.scoreDocs[0].doc);
         assertEquals(2, topDocs.scoreDocs[1].doc);
@@ -947,10 +947,10 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
     }

     // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery:
-    int origMaxClauseCount = BooleanQuery.getMaxClauseCount();
+    int origMaxClauseCount = IndexSearcher.getMaxClauseCount();
     try (Directory directory = new ByteBuffersDirectory()) {
         final int maxClauseCount = 100;
-        BooleanQuery.setMaxClauseCount(maxClauseCount);
+        IndexSearcher.setMaxClauseCount(maxClauseCount);
         try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) {
             Document document = new Document();
             for (int i = 0; i < maxClauseCount; i++) {
@@ -970,22 +970,22 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
             v
         );
         BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery();
-        assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));
+        assertThat(candidateQuery.clauses().get(0).query(), instanceOf(TermInSetQuery.class));

         TopDocs topDocs = shardSearcher.search(query, 10);
-        assertEquals(2L, topDocs.totalHits.value);
+        assertEquals(2L, topDocs.totalHits.value());
         assertEquals(2, topDocs.scoreDocs.length);
         assertEquals(1, topDocs.scoreDocs[0].doc);
         assertEquals(2, topDocs.scoreDocs[1].doc);

         topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10);
-        assertEquals(2L, topDocs.totalHits.value);
+        assertEquals(2L, topDocs.totalHits.value());
         assertEquals(2, topDocs.scoreDocs.length);
         assertEquals(1, topDocs.scoreDocs[0].doc);
         assertEquals(2, topDocs.scoreDocs[1].doc);
     }
     } finally {
-        BooleanQuery.setMaxClauseCount(origMaxClauseCount);
+        IndexSearcher.setMaxClauseCount(origMaxClauseCount);
     }
 }

|
@ -1032,7 +1032,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
||||||
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
||||||
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
||||||
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
||||||
assertEquals(2L, topDocs.totalHits.value);
|
assertEquals(2L, topDocs.totalHits.value());
|
||||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||||
assertEquals(1, topDocs.scoreDocs[1].doc);
|
assertEquals(1, topDocs.scoreDocs[1].doc);
|
||||||
}
|
}
|
||||||
|
@ -1066,7 +1066,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
||||||
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
||||||
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
||||||
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
||||||
assertEquals(1L, topDocs.totalHits.value);
|
assertEquals(1L, topDocs.totalHits.value());
|
||||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||||
|
|
||||||
memoryIndex = new MemoryIndex();
|
memoryIndex = new MemoryIndex();
|
||||||
|
@ -1074,7 +1074,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
||||||
percolateSearcher = memoryIndex.createSearcher();
|
percolateSearcher = memoryIndex.createSearcher();
|
||||||
query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
||||||
topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
||||||
assertEquals(1L, topDocs.totalHits.value);
|
assertEquals(1L, topDocs.totalHits.value());
|
||||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||||
|
|
||||||
memoryIndex = new MemoryIndex();
|
memoryIndex = new MemoryIndex();
|
||||||
|
@ -1082,7 +1082,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
||||||
percolateSearcher = memoryIndex.createSearcher();
|
percolateSearcher = memoryIndex.createSearcher();
|
||||||
query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
||||||
topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
||||||
assertEquals(1L, topDocs.totalHits.value);
|
assertEquals(1L, topDocs.totalHits.value());
|
||||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1117,7 +1117,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
|
||||||
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
IndexSearcher percolateSearcher = memoryIndex.createSearcher();
|
||||||
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
PercolateQuery query = (PercolateQuery) fieldType.percolateQuery("_name", queryStore, sources, percolateSearcher, false, v);
|
||||||
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
TopDocs topDocs = shardSearcher.search(query, 10, new Sort(SortField.FIELD_DOC));
|
||||||
assertEquals(1L, topDocs.totalHits.value);
|
assertEquals(1L, topDocs.totalHits.value());
|
||||||
assertEquals(0, topDocs.scoreDocs[0].doc);
|
assertEquals(0, topDocs.scoreDocs[0].doc);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1141,7 +1141,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
         TopDocs controlTopDocs = shardSearcher.search(controlQuery, 100);

         try {
-            assertThat(topDocs.totalHits.value, equalTo(controlTopDocs.totalHits.value));
+            assertThat(topDocs.totalHits.value(), equalTo(controlTopDocs.totalHits.value()));
             assertThat(topDocs.scoreDocs.length, equalTo(controlTopDocs.scoreDocs.length));
             for (int j = 0; j < topDocs.scoreDocs.length; j++) {
                 assertThat(topDocs.scoreDocs[j].doc, equalTo(controlTopDocs.scoreDocs[j].doc));
@@ -1164,12 +1164,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
                 logger.error("topDocs.scoreDocs[{}].doc={}", i, topDocs.scoreDocs[i].doc);
                 logger.error("topDocs.scoreDocs[{}].score={}", i, topDocs.scoreDocs[i].score);
             }
+            StoredFields storedFields = shardSearcher.storedFields();
             for (int i = 0; i < controlTopDocs.scoreDocs.length; i++) {
                 logger.error("controlTopDocs.scoreDocs[{}].doc={}", i, controlTopDocs.scoreDocs[i].doc);
                 logger.error("controlTopDocs.scoreDocs[{}].score={}", i, controlTopDocs.scoreDocs[i].score);

                 // Additional stored information that is useful when debugging:
-                String queryToString = shardSearcher.doc(controlTopDocs.scoreDocs[i].doc).get("query_to_string");
+                String queryToString = storedFields.document(controlTopDocs.scoreDocs[i].doc).get("query_to_string");
                 logger.error("controlTopDocs.scoreDocs[{}].query_to_string={}", i, queryToString);

                 TermsEnum tenum = MultiTerms.getTerms(shardSearcher.getIndexReader(), fieldType.queryTermsField.name()).iterator();
@@ -1289,7 +1290,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
             }

             @Override
-            public Scorer scorer(LeafReaderContext context) throws IOException {
+            public ScorerSupplier scorerSupplier(LeafReaderContext context) throws IOException {
                 float _score[] = new float[] { boost };
                 DocIdSetIterator allDocs = DocIdSetIterator.all(context.reader().maxDoc());
                 CheckedFunction<Integer, Query, IOException> leaf = queryStore.getQueries(context);
@@ -1313,7 +1314,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
                     }
                 }
             };
-            return new Scorer(this) {
+            Scorer scorer = new Scorer() {

                 @Override
                 public int docID() {
@@ -1335,6 +1336,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
                     return _score[0];
                 }
             };
+            return new DefaultScorerSupplier(scorer);
         }

         @Override
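When a Weight can build its Scorer eagerly, as in this test, wrapping it in the DefaultScorerSupplier introduced with this Lucene version (nested under Weight, if I read the API right) is the shortest way to satisfy the new scorerSupplier contract:

import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;

// `scorer` built eagerly, as in the anonymous Scorer above.
ScorerSupplier supplier = new Weight.DefaultScorerSupplier(scorer);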
@@ -118,7 +118,7 @@ public class PercolateQueryTests extends ESTestCase {
             )
         );
         TopDocs topDocs = shardSearcher.search(query, 10);
-        assertThat(topDocs.totalHits.value, equalTo(1L));
+        assertThat(topDocs.totalHits.value(), equalTo(1L));
         assertThat(topDocs.scoreDocs.length, equalTo(1));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(0));
         Explanation explanation = shardSearcher.explain(query, 0);
@@ -137,7 +137,7 @@ public class PercolateQueryTests extends ESTestCase {
             )
         );
         topDocs = shardSearcher.search(query, 10);
-        assertThat(topDocs.totalHits.value, equalTo(3L));
+        assertThat(topDocs.totalHits.value(), equalTo(3L));
         assertThat(topDocs.scoreDocs.length, equalTo(3));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(1));
         explanation = shardSearcher.explain(query, 1);
@@ -166,7 +166,7 @@ public class PercolateQueryTests extends ESTestCase {
             )
         );
         topDocs = shardSearcher.search(query, 10);
-        assertThat(topDocs.totalHits.value, equalTo(4L));
+        assertThat(topDocs.totalHits.value(), equalTo(4L));

         query = new PercolateQuery(
             "_name",
@@ -178,7 +178,7 @@ public class PercolateQueryTests extends ESTestCase {
             new MatchNoDocsQuery("")
         );
         topDocs = shardSearcher.search(query, 10);
-        assertThat(topDocs.totalHits.value, equalTo(3L));
+        assertThat(topDocs.totalHits.value(), equalTo(3L));
         assertThat(topDocs.scoreDocs.length, equalTo(3));
         assertThat(topDocs.scoreDocs[0].doc, equalTo(3));
         explanation = shardSearcher.explain(query, 3);
@@ -23,6 +23,7 @@ import org.apache.lucene.sandbox.document.HalfFloatPoint;
 import org.apache.lucene.sandbox.search.CoveringQuery;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.PhraseQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermInSetQuery;
@@ -417,10 +418,10 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
     }

     public void testCreateCandidateQuery() throws Exception {
-        int origMaxClauseCount = BooleanQuery.getMaxClauseCount();
+        int origMaxClauseCount = IndexSearcher.getMaxClauseCount();
         try {
             final int maxClauseCount = 100;
-            BooleanQuery.setMaxClauseCount(maxClauseCount);
+            IndexSearcher.setMaxClauseCount(maxClauseCount);
             addQueryFieldMappings();

             MemoryIndex memoryIndex = new MemoryIndex(false);
@@ -435,8 +436,8 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
             Tuple<BooleanQuery, Boolean> t = fieldType.createCandidateQuery(indexReader);
             assertTrue(t.v2());
             assertEquals(2, t.v1().clauses().size());
-            assertThat(t.v1().clauses().get(0).getQuery(), instanceOf(CoveringQuery.class));
-            assertThat(t.v1().clauses().get(1).getQuery(), instanceOf(TermQuery.class));
+            assertThat(t.v1().clauses().get(0).query(), instanceOf(CoveringQuery.class));
+            assertThat(t.v1().clauses().get(1).query(), instanceOf(TermQuery.class));

             // Now push it over the edge, so that it falls back using TermInSetQuery
             memoryIndex.addField("field2", "value", new WhitespaceAnalyzer());
@@ -444,12 +445,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
             t = fieldType.createCandidateQuery(indexReader);
             assertFalse(t.v2());
             assertEquals(3, t.v1().clauses().size());
-            TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).getQuery();
-            assertEquals(maxClauseCount - 1, terms.getTermData().size());
-            assertThat(t.v1().clauses().get(1).getQuery().toString(), containsString(fieldName + ".range_field:<ranges:"));
-            assertThat(t.v1().clauses().get(2).getQuery().toString(), containsString(fieldName + ".extraction_result:failed"));
+            TermInSetQuery terms = (TermInSetQuery) t.v1().clauses().get(0).query();
+            assertEquals(maxClauseCount - 1, terms.getTermsCount());
+            assertThat(t.v1().clauses().get(1).query().toString(), containsString(fieldName + ".range_field:<ranges:"));
+            assertThat(t.v1().clauses().get(2).query().toString(), containsString(fieldName + ".extraction_result:failed"));
         } finally {
-            BooleanQuery.setMaxClauseCount(origMaxClauseCount);
+            IndexSearcher.setMaxClauseCount(origMaxClauseCount);
         }
     }

@@ -409,7 +409,7 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {

         QueryBuilder query = new PercolateQueryBuilder("my_query", List.of(house1_doc, house2_doc), XContentType.JSON);
         assertResponse(client().prepareSearch("houses").setQuery(query), response -> {
-            assertEquals(2, response.getHits().getTotalHits().value);
+            assertEquals(2, response.getHits().getTotalHits().value());

             SearchHit[] hits = response.getHits().getHits();
             assertThat(hits[0].getFields().get("_percolator_document_slot").getValues(), equalTo(Arrays.asList(0, 1)));
@@ -82,7 +82,7 @@ public class QueryAnalyzerTests extends ESTestCase {
     }

     public void testExtractQueryMetadata_termsQuery() {
-        TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2"));
+        TermInSetQuery termsQuery = new TermInSetQuery("_field", List.of(new BytesRef("_term1"), new BytesRef("_term2")));
         Result result = analyze(termsQuery);
         assertThat(result.verified, is(true));
         assertThat(result.minimumShouldMatch, equalTo(1));
@@ -70,7 +70,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase {
             final TotalHits totalHits = SearchResponseUtils.getTotalHits(
                 client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000)
             );
-            return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber;
+            return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber;
         }));
     }

@@ -85,7 +85,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase {
             final TotalHits totalHits = SearchResponseUtils.getTotalHits(
                 client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000)
             );
-            return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber;
+            return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber;
         }));
     }

@@ -114,7 +114,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase {
             final TotalHits totalHits = SearchResponseUtils.getTotalHits(
                 client(LOCAL_CLUSTER).prepareSearch("test-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000)
             );
-            return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber;
+            return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber;
         }));
     }
 }
@@ -146,7 +146,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase {
             final TotalHits totalHits = SearchResponseUtils.getTotalHits(
                 client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000)
             );
-            return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber;
+            return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber;
         }));
     }

@@ -162,7 +162,7 @@ public class CrossClusterReindexIT extends AbstractMultiClustersTestCase {
             final TotalHits totalHits = SearchResponseUtils.getTotalHits(
                 client(LOCAL_CLUSTER).prepareSearch("desc-index-001").setQuery(new MatchAllQueryBuilder()).setSize(1000)
             );
-            return totalHits.relation == TotalHits.Relation.EQUAL_TO && totalHits.value == docsNumber;
+            return totalHits.relation() == TotalHits.Relation.EQUAL_TO && totalHits.value() == docsNumber;
         }));
     }

@@ -12,7 +12,6 @@ package org.elasticsearch.reindex;
 import org.apache.lucene.util.automaton.Automata;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.CharacterRunAutomaton;
-import org.apache.lucene.util.automaton.MinimizationOperations;
 import org.apache.lucene.util.automaton.Operations;
 import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.DocWriteRequest;
@@ -96,7 +95,7 @@ public class ReindexValidator {
             return new CharacterRunAutomaton(Automata.makeEmpty());
         }
         Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY));
-        automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
+        automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
         if (Operations.isTotal(automaton)) {
             throw new IllegalArgumentException(
                 "Refusing to start because whitelist "
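This matches the commit notes that regex automata are no longer determinized by default and that minimize moved out of the main API: callers now determinize explicitly before wrapping the automaton in a run automaton. A sketch (buildAutomaton() is a hypothetical placeholder for e.g. Regex.simpleMatchToAutomaton):

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;

Automaton automaton = buildAutomaton(); // hypothetical source of a non-deterministic automaton
automaton = Operations.determinize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
CharacterRunAutomaton run = new CharacterRunAutomaton(automaton); // requires a deterministic input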
@@ -97,8 +97,8 @@ final class RemoteResponseParsers {
     HITS_PARSER.declareField(constructorArg(), (p, c) -> {
         if (p.currentToken() == XContentParser.Token.START_OBJECT) {
             final TotalHits totalHits = SearchHits.parseTotalHitsFragment(p);
-            assert totalHits.relation == TotalHits.Relation.EQUAL_TO;
-            return totalHits.value;
+            assert totalHits.relation() == TotalHits.Relation.EQUAL_TO;
+            return totalHits.value();
         } else {
             // For BWC with nodes pre 7.0
             return p.longValue();
@@ -242,7 +242,7 @@ public class AnnotatedTextFieldMapperTests extends MapperTestCase {

         withLuceneIndex(mapperService, iw -> iw.addDocument(doc.rootDoc()), reader -> {
             LeafReader leaf = reader.leaves().get(0).reader();
-            Terms terms = leaf.getTermVector(0, "field");
+            Terms terms = leaf.termVectors().get(0, "field");
             TermsEnum iterator = terms.iterator();
             BytesRef term;
             Set<String> foundTerms = new HashSet<>();
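getTermVector(docId, field) is one of several per-document reader methods replaced by explicit, thread-confined views; stored fields get the same treatment (compare the storedFields() change in CandidateQueryTests above). A sketch of both accessors, assuming an already-open LeafReader:

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.Terms;

// Per-thread views replace LeafReader.getTermVector/document.
static void readDoc(LeafReader leafReader) throws IOException {
    Terms vector = leafReader.termVectors().get(0, "field");
    Document stored = leafReader.storedFields().document(0);
}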
@@ -130,7 +130,7 @@ public class AnnotatedTextHighlighterTests extends ESTestCase {
     }

     TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 1, Sort.INDEXORDER);
-    assertThat(topDocs.totalHits.value, equalTo(1L));
+    assertThat(topDocs.totalHits.value(), equalTo(1L));
     String rawValue = Strings.collectionToDelimitedString(plainTextForHighlighter, String.valueOf(MULTIVAL_SEP_CHAR));
     UnifiedHighlighter.Builder builder = UnifiedHighlighter.builder(searcher, hiliteAnalyzer);
     builder.withBreakIterator(() -> breakIterator);
@@ -27,7 +27,6 @@ public final class SmbMmapFsDirectoryFactory extends FsDirectoryFactory {
         return new SmbDirectoryWrapper(
             setPreload(
                 new MMapDirectory(location, lockFactory),
-                lockFactory,
                 new HashSet<>(indexSettings.getValue(IndexModule.INDEX_STORE_PRE_LOAD_SETTING))
             )
         );
@@ -33,6 +33,7 @@ import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.index.IndexVersions;
 import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.rest.action.admin.indices.RestPutIndexTemplateAction;
+import org.elasticsearch.search.SearchFeatures;
 import org.elasticsearch.test.NotEqualMessageBuilder;
 import org.elasticsearch.test.XContentTestUtils;
 import org.elasticsearch.test.cluster.ElasticsearchCluster;
@@ -1694,6 +1695,211 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas
         }
     }

+    /**
+     * This test ensures that search results on old indices using "persian" analyzer don't change
+     * after we introduce Lucene 10
+     */
+    public void testPersianAnalyzerBWC() throws Exception {
+        var originalClusterLegacyPersianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false;
+        assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyPersianAnalyzer);
+        final String indexName = "test_persian_stemmer";
+        Settings idxSettings = indexSettings(1, 1).build();
+        String mapping = """
+            {
+              "properties": {
+                "textfield" : {
+                  "type": "text",
+                  "analyzer": "persian"
+                }
+              }
+            }
+            """;
+
+        String query = """
+            {
+              "query": {
+                "match": {
+                  "textfield": "كتابها"
+                }
+              }
+            }
+            """;
+
+        if (isRunningAgainstOldCluster()) {
+            createIndex(client(), indexName, idxSettings, mapping);
+            ensureGreen(indexName);
+
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + indexName + "/" + "_doc/1",
+                        (builder, params) -> builder.field("textfield", "كتابها")
+                    )
+                )
+            );
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + indexName + "/" + "_doc/2",
+                        (builder, params) -> builder.field("textfield", "كتاب")
+                    )
+                )
+            );
+            refresh(indexName);
+
+            assertNumHits(indexName, 2, 1);
+
+            Request searchRequest = new Request("POST", "/" + indexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(1, entityAsMap(client().performRequest(searchRequest)));
+        } else {
+            // old index should still only return one doc
+            Request searchRequest = new Request("POST", "/" + indexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(1, entityAsMap(client().performRequest(searchRequest)));
+
+            String newIndexName = indexName + "_new";
+            createIndex(client(), newIndexName, idxSettings, mapping);
+            ensureGreen(newIndexName);
+
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + newIndexName + "/" + "_doc/1",
+                        (builder, params) -> builder.field("textfield", "كتابها")
+                    )
+                )
+            );
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + newIndexName + "/" + "_doc/2",
+                        (builder, params) -> builder.field("textfield", "كتاب")
+                    )
+                )
+            );
+            refresh(newIndexName);
+
+            searchRequest = new Request("POST", "/" + newIndexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(2, entityAsMap(client().performRequest(searchRequest)));
+
+            // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index
+            searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(3, entityAsMap(client().performRequest(searchRequest)));
+        }
+    }
+
+    /**
+     * This test ensures that search results on old indices using "romanian" analyzer don't change
+     * after we introduce Lucene 10
+     */
+    public void testRomanianAnalyzerBWC() throws Exception {
+        var originalClusterLegacyRomanianAnalyzer = oldClusterHasFeature(SearchFeatures.LUCENE_10_0_0_UPGRADE) == false;
+        assumeTrue("Don't run this test if both versions already support stemming", originalClusterLegacyRomanianAnalyzer);
+        final String indexName = "test_romanian_stemmer";
+        Settings idxSettings = indexSettings(1, 1).build();
+        String cedillaForm = "absenţa";
+        String commaForm = "absența";
+
+        String mapping = """
+            {
+              "properties": {
+                "textfield" : {
+                  "type": "text",
+                  "analyzer": "romanian"
+                }
+              }
+            }
+            """;
+
+        // query that uses the cedilla form of "t"
+        String query = """
+            {
+              "query": {
+                "match": {
+                  "textfield": "absenţa"
+                }
+              }
+            }
+            """;
+
+        if (isRunningAgainstOldCluster()) {
+            createIndex(client(), indexName, idxSettings, mapping);
+            ensureGreen(indexName);
+
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + indexName + "/" + "_doc/1",
+                        (builder, params) -> builder.field("textfield", cedillaForm)
+                    )
+                )
+            );
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + indexName + "/" + "_doc/2",
+                        // this doc uses the comma form
+                        (builder, params) -> builder.field("textfield", commaForm)
+                    )
+                )
+            );
+            refresh(indexName);
+
+            assertNumHits(indexName, 2, 1);
+
+            Request searchRequest = new Request("POST", "/" + indexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(1, entityAsMap(client().performRequest(searchRequest)));
+        } else {
+            // old index should still only return one doc
+            Request searchRequest = new Request("POST", "/" + indexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(1, entityAsMap(client().performRequest(searchRequest)));
+
+            String newIndexName = indexName + "_new";
+            createIndex(client(), newIndexName, idxSettings, mapping);
+            ensureGreen(newIndexName);
+
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + newIndexName + "/" + "_doc/1",
+                        (builder, params) -> builder.field("textfield", cedillaForm)
+                    )
+                )
+            );
+            assertOK(
+                client().performRequest(
+                    newXContentRequest(
+                        HttpMethod.POST,
+                        "/" + newIndexName + "/" + "_doc/2",
+                        (builder, params) -> builder.field("textfield", commaForm)
+                    )
+                )
+            );
+            refresh(newIndexName);
+
+            searchRequest = new Request("POST", "/" + newIndexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(2, entityAsMap(client().performRequest(searchRequest)));
+
+            // searching both indices (old and new analysis version) we should get 1 hit from the old and 2 from the new index
+            searchRequest = new Request("POST", "/" + indexName + "," + newIndexName + "/_search");
+            searchRequest.setJsonEntity(query);
+            assertTotalHits(3, entityAsMap(client().performRequest(searchRequest)));
+        }
+    }
+
     public void testForbidDisableSoftDeletesOnRestore() throws Exception {
         final String snapshot = "snapshot-" + index;
         if (isRunningAgainstOldCluster()) {
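
Note on the tests above: they encode the behavior change that Lucene 10's "persian" analysis applies stemming, so the inflected form كتابها and the base form كتاب reduce to the same term on newly created indices (two hits for the match query), while indices created on the old version keep the unstemmed behavior (one hit). A standalone sketch for inspecting the produced tokens with plain Lucene classes (illustrative only, not part of this commit; the output depends on the Lucene version on the classpath):

```java
import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.fa.PersianAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

// Minimal sketch: print the terms the Persian analyzer produces for both forms,
// to see whether they stem to the same token.
class PersianStemmingDemo {
    public static void main(String[] args) throws IOException {
        try (Analyzer analyzer = new PersianAnalyzer()) {
            for (String text : new String[] { "كتابها", "كتاب" }) {
                try (TokenStream ts = analyzer.tokenStream("textfield", text)) {
                    CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
                    ts.reset();
                    while (ts.incrementToken()) {
                        System.out.println(text + " -> " + term);
                    }
                    ts.end();
                }
            }
        }
    }
}
```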
@@ -55,6 +55,8 @@ tasks.named("precommit").configure {
 }

 tasks.named("yamlRestCompatTestTransform").configure ({ task ->
+  task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling")
+  task.replaceValueInMatch("profile.shards.0.dfs.knn.0.query.0.description", "DocAndScoreQuery[0,...][0.009673266,...],0.009673266", "dfs knn vector profiling with vector_operations_count")
   task.skipTest("indices.sort/10_basic/Index Sort", "warning does not exist for compatibility")
   task.skipTest("search/330_fetch_fields/Test search rewrite", "warning does not exist for compatibility")
 })
@@ -212,7 +212,6 @@ dfs knn vector profiling:

   - match: { hits.total.value: 1 }
   - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" }
-  - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" }
   - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 }
   - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 }
   - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score: 0 }

@@ -235,6 +234,47 @@ dfs knn vector profiling:
   - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" }
   - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 }

+---
+dfs knn vector profiling description:
+  - requires:
+      cluster_features: ["lucene_10_upgrade"]
+      reason: "the profile description changed with Lucene 10"
+  - do:
+      indices.create:
+        index: images
+        body:
+          settings:
+            index.number_of_shards: 1
+          mappings:
+            properties:
+              image:
+                type: "dense_vector"
+                dims: 3
+                index: true
+                similarity: "l2_norm"
+
+  - do:
+      index:
+        index: images
+        id: "1"
+        refresh: true
+        body:
+          image: [1, 5, -20]
+
+  - do:
+      search:
+        index: images
+        body:
+          profile: true
+          knn:
+            field: "image"
+            query_vector: [-5, 9, -12]
+            k: 1
+            num_candidates: 100
+
+  - match: { hits.total.value: 1 }
+  - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScoreQuery[0,...][0.009673266,...],0.009673266" }
+
 ---
 dfs knn vector profiling with vector_operations_count:
   - requires:
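
Note on the gating above: the YAML test requires the cluster feature "lucene_10_upgrade", and the Java BWC tests check SearchFeatures.LUCENE_10_0_0_UPGRADE for the same purpose. A hypothetical sketch of how such a node feature could be declared (the actual contents of SearchFeatures are not shown in this excerpt; only the feature id "lucene_10_upgrade" is taken from the usages above):

```java
import java.util.Set;

import org.elasticsearch.features.FeatureSpecification;
import org.elasticsearch.features.NodeFeature;

// Hypothetical sketch of a feature flag published by nodes that carry the Lucene 10
// upgrade, so tests can require or skip behavior depending on cluster support.
public class SearchFeaturesSketch implements FeatureSpecification {

    public static final NodeFeature LUCENE_10_0_0_UPGRADE = new NodeFeature("lucene_10_upgrade");

    @Override
    public Set<NodeFeature> getFeatures() {
        return Set.of(LUCENE_10_0_0_UPGRADE);
    }
}
```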
@@ -276,7 +316,6 @@ dfs knn vector profiling with vector_operations_count:

   - match: { hits.total.value: 1 }
   - match: { profile.shards.0.dfs.knn.0.query.0.type: "DocAndScoreQuery" }
-  - match: { profile.shards.0.dfs.knn.0.query.0.description: "DocAndScore[100]" }
   - match: { profile.shards.0.dfs.knn.0.vector_operations_count: 1 }
   - gt: { profile.shards.0.dfs.knn.0.query.0.time_in_nanos: 0 }
   - match: { profile.shards.0.dfs.knn.0.query.0.breakdown.set_min_competitive_score_count: 0 }

@@ -300,7 +339,6 @@ dfs knn vector profiling with vector_operations_count:
   - match: { profile.shards.0.dfs.knn.0.collector.0.reason: "search_top_hits" }
   - gt: { profile.shards.0.dfs.knn.0.collector.0.time_in_nanos: 0 }
-

 ---
 dfs profile for search with dfs_query_then_fetch:
   - requires:
@@ -571,7 +571,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
         SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.QUERY_THEN_FETCH);
         assertNoFailuresAndResponse(
             internalCluster().coordOnlyNodeClient().search(searchRequest),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L))
         );

         clearInterceptedActions();

@@ -601,7 +601,7 @@ public class IndicesRequestIT extends ESIntegTestCase {
         SearchRequest searchRequest = new SearchRequest(randomIndicesOrAliases).searchType(SearchType.DFS_QUERY_THEN_FETCH);
         assertNoFailuresAndResponse(
             internalCluster().coordOnlyNodeClient().search(searchRequest),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, greaterThan(0L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), greaterThan(0L))
         );

         clearInterceptedActions();
@@ -306,8 +306,8 @@ public class CreateIndexIT extends ESIntegTestCase {
             prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen())
                 .setQuery(new RangeQueryBuilder("index_version").from(indexVersion.get(), true)),
             expected -> assertNoFailuresAndResponse(prepareSearch("test").setIndicesOptions(IndicesOptions.lenientExpandOpen()), all -> {
-                assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value, all.getHits().getTotalHits().value);
-                logger.info("total: {}", expected.getHits().getTotalHits().value);
+                assertEquals(expected + " vs. " + all, expected.getHits().getTotalHits().value(), all.getHits().getTotalHits().value());
+                logger.info("total: {}", expected.getHits().getTotalHits().value());
             })
         );
     }

@@ -253,7 +253,7 @@ public class SplitIndexIT extends ESIntegTestCase {
         // now, do a nested query
         assertNoFailuresAndResponse(
             prepareSearch(index).setQuery(nestedQuery("nested1", termQuery("nested1.n_field1", "n_value1_1"), ScoreMode.Avg)),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numDocs))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) numDocs))
         );
     }
@@ -141,11 +141,11 @@ public class BulkProcessor2RetryIT extends ESIntegTestCase {
         assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> {
             assertThat(bulkProcessor.getTotalBytesInFlight(), equalTo(0L));
             if (rejectedExecutionExpected) {
-                assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));
+                assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps));
             } else if (finalRejectedAfterAllRetries) {
-                assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps));
+                assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps));
             } else {
-                assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps));
+                assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps));
             }
         });
     }

@@ -136,11 +136,11 @@ public class BulkProcessorRetryIT extends ESIntegTestCase {
         final boolean finalRejectedAfterAllRetries = rejectedAfterAllRetries;
         assertResponse(prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0), results -> {
             if (rejectedExecutionExpected) {
-                assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));
+                assertThat((int) results.getHits().getTotalHits().value(), lessThanOrEqualTo(numberOfAsyncOps));
             } else if (finalRejectedAfterAllRetries) {
-                assertThat((int) results.getHits().getTotalHits().value, lessThan(numberOfAsyncOps));
+                assertThat((int) results.getHits().getTotalHits().value(), lessThan(numberOfAsyncOps));
             } else {
-                assertThat((int) results.getHits().getTotalHits().value, equalTo(numberOfAsyncOps));
+                assertThat((int) results.getHits().getTotalHits().value(), equalTo(numberOfAsyncOps));
             }
         });
     }
@@ -90,7 +90,7 @@ public class IncrementalBulkIT extends ESIntegTestCase {

         assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> {
             assertNoFailures(searchResponse);
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) 1));
+            assertThat(searchResponse.getHits().getTotalHits().value(), equalTo((long) 1));
         });

         assertFalse(refCounted.hasReferences());

@@ -268,7 +268,7 @@ public class IncrementalBulkIT extends ESIntegTestCase {

         assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> {
             assertNoFailures(searchResponse);
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(docs));
+            assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(docs));
         });
     }
 }

@@ -358,7 +358,7 @@ public class IncrementalBulkIT extends ESIntegTestCase {

         assertResponse(prepareSearch(index).setQuery(QueryBuilders.matchAllQuery()), searchResponse -> {
             assertNoFailures(searchResponse);
-            assertThat(searchResponse.getHits().getTotalHits().value, equalTo(hits.get()));
+            assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(hits.get()));
         });
     }
 }
@@ -81,7 +81,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         );
         indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet();
         SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet();
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
+        assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L));
         searchResponse.decRef();
         ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet();
         Map<String, Object> indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap();

@@ -138,7 +138,7 @@ public class TransportSimulateBulkActionIT extends ESIntegTestCase {
         // Now make sure nothing was actually changed:
         indicesAdmin().refresh(new RefreshRequest(indexName)).actionGet();
         SearchResponse searchResponse = client().search(new SearchRequest(indexName)).actionGet();
-        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L));
+        assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(0L));
         searchResponse.decRef();
         ClusterStateResponse clusterStateResponse = admin().cluster().state(new ClusterStateRequest(TEST_REQUEST_TIMEOUT)).actionGet();
         Map<String, Object> indexMapping = clusterStateResponse.getState().metadata().index(indexName).mapping().sourceAsMap();
@@ -45,9 +45,9 @@ public class WriteAckDelayIT extends ESIntegTestCase {
             try {
                 logger.debug("running search");
                 assertResponse(prepareSearch("test"), response -> {
-                    if (response.getHits().getTotalHits().value != numOfDocs) {
+                    if (response.getHits().getTotalHits().value() != numOfDocs) {
                         final String message = "Count is "
-                            + response.getHits().getTotalHits().value
+                            + response.getHits().getTotalHits().value()
                             + " but "
                             + numOfDocs
                             + " was expected. "
@@ -612,7 +612,7 @@ public class PointInTimeIT extends ESIntegTestCase {
             assertThat(resp.getSuccessfulShards(), equalTo(numShards - shardsRemoved));
             assertThat(resp.getFailedShards(), equalTo(shardsRemoved));
             assertNotNull(resp.getHits().getTotalHits());
-            assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs));
+            assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs));
         });

         // create a PIT when some shards are missing

@@ -637,7 +637,7 @@ public class PointInTimeIT extends ESIntegTestCase {
                 assertThat(resp.getFailedShards(), equalTo(shardsRemoved));
                 assertThat(resp.pointInTimeId(), equalTo(pointInTimeResponseOneNodeDown.getPointInTimeId()));
                 assertNotNull(resp.getHits().getTotalHits());
-                assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs));
+                assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs));
             }
         );

@@ -661,7 +661,7 @@ public class PointInTimeIT extends ESIntegTestCase {
             assertThat(resp.getSuccessfulShards(), equalTo(numShards));
             assertThat(resp.getFailedShards(), equalTo(0));
             assertNotNull(resp.getHits().getTotalHits());
-            assertThat(resp.getHits().getTotalHits().value, greaterThan((long) numDocs));
+            assertThat(resp.getHits().getTotalHits().value(), greaterThan((long) numDocs));
         });

         // ensure that when using the previously created PIT, we'd see the same number of documents as before regardless of the

@@ -681,7 +681,7 @@ public class PointInTimeIT extends ESIntegTestCase {
                 }
                 assertNotNull(resp.getHits().getTotalHits());
                 // we expect less documents as the newly indexed ones should not be part of the PIT
-                assertThat(resp.getHits().getTotalHits().value, lessThan((long) numDocs));
+                assertThat(resp.getHits().getTotalHits().value(), lessThan((long) numDocs));
             }
         );
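
Note on the assertions above: with the record accessors, getTotalHits().value() is only an exact count when getTotalHits().relation() is EQUAL_TO; when hit counting is capped, it is a lower bound. A minimal sketch of a relation-aware reading (illustrative only, not part of this commit):

```java
import org.apache.lucene.search.TotalHits;

// Minimal sketch: interpret TotalHits.value() according to TotalHits.relation().
class TotalHitsBoundDemo {
    static String describe(TotalHits totalHits) {
        return switch (totalHits.relation()) {
            case EQUAL_TO -> "exactly " + totalHits.value() + " hits";
            case GREATER_THAN_OR_EQUAL_TO -> "at least " + totalHits.value() + " hits";
        };
    }
}
```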
@@ -143,7 +143,7 @@ public class TransportSearchIT extends ESIntegTestCase {
             randomBoolean()
         );
         assertResponse(client().search(searchRequest), searchResponse -> {
-            assertEquals(1, searchResponse.getHits().getTotalHits().value);
+            assertEquals(1, searchResponse.getHits().getTotalHits().value());
             SearchHit[] hits = searchResponse.getHits().getHits();
             assertEquals(1, hits.length);
             SearchHit hit = hits[0];

@@ -162,7 +162,7 @@ public class TransportSearchIT extends ESIntegTestCase {
             randomBoolean()
         );
         assertResponse(client().search(searchRequest), searchResponse -> {
-            assertEquals(1, searchResponse.getHits().getTotalHits().value);
+            assertEquals(1, searchResponse.getHits().getTotalHits().value());
             SearchHit[] hits = searchResponse.getHits().getHits();
             assertEquals(1, hits.length);
             SearchHit hit = hits[0];

@@ -221,7 +221,7 @@ public class TransportSearchIT extends ESIntegTestCase {
         );
         searchRequest.indices("<test-{now/d}>");
         assertResponse(client().search(searchRequest), searchResponse -> {
-            assertEquals(1, searchResponse.getHits().getTotalHits().value);
+            assertEquals(1, searchResponse.getHits().getTotalHits().value());
             assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex());
         });
     }

@@ -241,7 +241,7 @@ public class TransportSearchIT extends ESIntegTestCase {
         sourceBuilder.query(rangeQuery);
         searchRequest.source(sourceBuilder);
         assertResponse(client().search(searchRequest), searchResponse -> {
-            assertEquals(1, searchResponse.getHits().getTotalHits().value);
+            assertEquals(1, searchResponse.getHits().getTotalHits().value());
             assertEquals("test-1970.01.01", searchResponse.getHits().getHits()[0].getIndex());
         });
     }

@@ -280,7 +280,7 @@ public class TransportSearchIT extends ESIntegTestCase {
                 ? originalRequest
                 : SearchRequest.subSearchRequest(taskId, originalRequest, Strings.EMPTY_ARRAY, "remote", nowInMillis, true);
             assertResponse(client().search(searchRequest), searchResponse -> {
-                assertEquals(2, searchResponse.getHits().getTotalHits().value);
+                assertEquals(2, searchResponse.getHits().getTotalHits().value());
                 InternalAggregations aggregations = searchResponse.getAggregations();
                 LongTerms longTerms = aggregations.get("terms");
                 assertEquals(1, longTerms.getBuckets().size());

@@ -296,7 +296,7 @@ public class TransportSearchIT extends ESIntegTestCase {
                 false
             );
             assertResponse(client().search(searchRequest), searchResponse -> {
-                assertEquals(2, searchResponse.getHits().getTotalHits().value);
+                assertEquals(2, searchResponse.getHits().getTotalHits().value());
                 InternalAggregations aggregations = searchResponse.getAggregations();
                 LongTerms longTerms = aggregations.get("terms");
                 assertEquals(2, longTerms.getBuckets().size());

@@ -432,7 +432,7 @@ public class TransportSearchIT extends ESIntegTestCase {
             () -> assertResponse(
                 prepareSearch("test").setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03"))
                     .setPreFilterShardSize(randomIntBetween(1, 3)),
-                resp -> assertThat(resp.getHits().getTotalHits().value, equalTo(2L))
+                resp -> assertThat(resp.getHits().getTotalHits().value(), equalTo(2L))
             )
         );
     }
@@ -396,7 +396,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("foos").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L))
         );

         logger.info("--> checking filtering alias for one index");

@@ -406,7 +406,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L))
         );

         logger.info("--> checking filtering alias for two indices and one complete index");

@@ -416,7 +416,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("foos", "test1").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L))
         );

         logger.info("--> checking filtering alias for two indices and non-filtering alias for one index");

@@ -426,17 +426,17 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("foos", "aliasToTest1").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L))
         );

         logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");
         assertResponse(
             prepareSearch("foos", "aliasToTests").setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L))
         );
         assertResponse(
             prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L))
         );

         logger.info("--> checking filtering alias for two indices and non-filtering alias for both indices");

@@ -446,7 +446,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("foos", "aliasToTests").setSize(0).setQuery(QueryBuilders.termQuery("name", "something")),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(2L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(2L))
         );
     }

@@ -508,7 +508,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("filter23", "filter13").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L))
         );

         assertResponse(

@@ -517,7 +517,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("filter23", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(5L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(5L))
         );

         assertResponse(

@@ -526,7 +526,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("filter13", "filter1").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(4L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(4L))
         );

         assertResponse(

@@ -535,7 +535,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("filter13", "filter1", "filter23").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L))
         );

         assertResponse(

@@ -544,7 +544,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("filter23", "filter13", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(6L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(6L))
         );

         assertResponse(

@@ -553,7 +553,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         );
         assertResponse(
             prepareSearch("filter23", "filter13", "test1", "test2").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(8L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(8L))
         );
     }

@@ -608,7 +608,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> checking counts before delete");
         assertResponse(
             prepareSearch("bars").setSize(0).setQuery(QueryBuilders.matchAllQuery()),
-            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L))
+            searchResponse -> assertThat(searchResponse.getHits().getTotalHits().value(), equalTo(1L))
         );
     }

@@ -1399,7 +1399,7 @@ public class IndexAliasesIT extends ESIntegTestCase {
     }

     private void assertHits(SearchHits hits, String... ids) {
-        assertThat(hits.getTotalHits().value, equalTo((long) ids.length));
+        assertThat(hits.getTotalHits().value(), equalTo((long) ids.length));
         Set<String> hitIds = new HashSet<>();
         for (SearchHit hit : hits.getHits()) {
             hitIds.add(hit.getId());
@@ -44,7 +44,7 @@ public class BroadcastActionsIT extends ESIntegTestCase {
         for (int i = 0; i < 5; i++) {
             // test successful
             assertResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> {
-                assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L));
                 assertThat(countResponse.getTotalShards(), equalTo(numShards.numPrimaries));
                 assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
                 assertThat(countResponse.getFailedShards(), equalTo(0));
@@ -152,7 +152,7 @@ public class DocumentActionsIT extends ESIntegTestCase {
         for (int i = 0; i < 5; i++) {
             // test successful
             assertNoFailuresAndResponse(prepareSearch("test").setSize(0).setQuery(matchAllQuery()), countResponse -> {
-                assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L));
                 assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
                 assertThat(countResponse.getFailedShards(), equalTo(0));
             });

@@ -164,7 +164,7 @@ public class DocumentActionsIT extends ESIntegTestCase {
                     countResponse.getShardFailures() == null ? 0 : countResponse.getShardFailures().length,
                     equalTo(0)
                 );
-                assertThat(countResponse.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(countResponse.getHits().getTotalHits().value(), equalTo(2L));
                 assertThat(countResponse.getSuccessfulShards(), equalTo(numShards.numPrimaries));
                 assertThat(countResponse.getFailedShards(), equalTo(0));
             });
@@ -115,7 +115,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
             .get();
         assertEquals(RestStatus.CREATED, indexResponse.status());
         assertResponse(prepareSearch("target"), response -> {
-            assertEquals(1, response.getHits().getTotalHits().value);
+            assertEquals(1, response.getHits().getTotalHits().value());
             assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final"));
         });
     }

@@ -139,7 +139,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
             .get();
         assertEquals(RestStatus.CREATED, indexResponse.status());
         assertResponse(prepareSearch("target"), response -> {
-            assertEquals(1, response.getHits().getTotalHits().value);
+            assertEquals(1, response.getHits().getTotalHits().value());
             assertEquals(true, response.getHits().getAt(0).getSourceAsMap().get("final"));
         });
     }

@@ -163,7 +163,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
             .get();
         assertEquals(RestStatus.CREATED, indexResponse.status());
         assertResponse(prepareSearch("target"), response -> {
-            assertEquals(1, response.getHits().getTotalHits().value);
+            assertEquals(1, response.getHits().getTotalHits().value());
             assertFalse(response.getHits().getAt(0).getSourceAsMap().containsKey("final"));
         });
     }

@@ -187,7 +187,7 @@ public class FinalPipelineIT extends ESIntegTestCase {
             .get();
         assertEquals(RestStatus.CREATED, indexResponse.status());
         assertResponse(prepareSearch("target"), response -> {
-            assertEquals(1, response.getHits().getTotalHits().value);
+            assertEquals(1, response.getHits().getTotalHits().value());
             assertTrue(response.getHits().getAt(0).getSourceAsMap().containsKey("final"));
         });
     }
@@ -107,7 +107,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase {
         indicesAdmin().prepareRefresh("test").get();
         assertNoFailuresAndResponse(
             prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0),
-            response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get()))
+            response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get()))
         );
         if (randomBoolean()) {
             indicesAdmin().prepareFlush("test").get();

@@ -117,7 +117,7 @@ public class MaxDocsLimitIT extends ESIntegTestCase {
         ensureGreen("test");
         assertNoFailuresAndResponse(
             prepareSearch("test").setQuery(new MatchAllQueryBuilder()).setTrackTotalHitsUpTo(Integer.MAX_VALUE).setSize(0),
-            response -> assertThat(response.getHits().getTotalHits().value, equalTo((long) maxDocs.get()))
+            response -> assertThat(response.getHits().getTotalHits().value(), equalTo((long) maxDocs.get()))
         );
     }
@@ -46,7 +46,7 @@ public class CopyToMapperIntegrationIT extends ESIntegTestCase {
                 AggregationBuilders.terms("test_raw").field("test_field_raw").size(recordCount * 2).collectMode(aggCollectionMode)
             ),
             response -> {
-                assertThat(response.getHits().getTotalHits().value, equalTo((long) recordCount));
+                assertThat(response.getHits().getTotalHits().value(), equalTo((long) recordCount));

                 assertThat(((Terms) response.getAggregations().get("test")).getBuckets().size(), equalTo(recordCount + 1));
                 assertThat(((Terms) response.getAggregations().get("test_raw")).getBuckets().size(), equalTo(recordCount));

@@ -115,7 +115,7 @@ public class ExceptionRetryIT extends ESIntegTestCase {
                 assertResponse(
                     prepareSearch("index").setQuery(termQuery("_id", response.getHits().getHits()[i].getId())).setExplain(true),
                     dupIdResponse -> {
-                        assertThat(dupIdResponse.getHits().getTotalHits().value, greaterThan(1L));
+                        assertThat(dupIdResponse.getHits().getTotalHits().value(), greaterThan(1L));
                         logger.info("found a duplicate id:");
                         for (SearchHit hit : dupIdResponse.getHits()) {
                             logger.info("Doc {} was found on shard {}", hit.getId(), hit.getShard().getShardId());
@@ -57,9 +57,9 @@ public class IndexActionIT extends ESIntegTestCase {
             try {
                 logger.debug("running search with all types");
                 assertResponse(prepareSearch("test"), response -> {
-                    if (response.getHits().getTotalHits().value != numOfDocs) {
+                    if (response.getHits().getTotalHits().value() != numOfDocs) {
                         final String message = "Count is "
-                            + response.getHits().getTotalHits().value
+                            + response.getHits().getTotalHits().value()
                             + " but "
                             + numOfDocs
                             + " was expected. "

@@ -77,9 +77,9 @@ public class IndexActionIT extends ESIntegTestCase {
             try {
                 logger.debug("running search with a specific type");
                 assertResponse(prepareSearch("test"), response -> {
-                    if (response.getHits().getTotalHits().value != numOfDocs) {
+                    if (response.getHits().getTotalHits().value() != numOfDocs) {
                         final String message = "Count is "
-                            + response.getHits().getTotalHits().value
+                            + response.getHits().getTotalHits().value()
                             + " but "
                             + numOfDocs
                             + " was expected. "
@@ -149,7 +149,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .addAggregation(new GlobalAggregationBuilder("global")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(7L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
             }
         );
         assertCacheState(client, "index", 0, 5);

@@ -161,7 +161,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .addAggregation(new GlobalAggregationBuilder("global")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(7L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(7L));

             }
         );

@@ -174,7 +174,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .addAggregation(new GlobalAggregationBuilder("global")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(7L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
             }
         );
         assertCacheState(client, "index", 6, 9);

@@ -217,7 +217,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(8L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(8L));
             }
         );
         assertCacheState(client, "index", 0, 1);

@@ -229,7 +229,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(8L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(8L));
             }
         );
         assertCacheState(client, "index", 1, 1);

@@ -241,7 +241,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-28")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(8L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(8L));
             }
         );
         assertCacheState(client, "index", 2, 1);

@@ -286,7 +286,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .addAggregation(new GlobalAggregationBuilder("global")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(9L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(9L));
             }
         );
         assertCacheState(client, "index", 0, 1);

@@ -299,7 +299,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .addAggregation(new GlobalAggregationBuilder("global")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(9L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(9L));
             }
         );
         assertCacheState(client, "index", 1, 1);

@@ -312,7 +312,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .addAggregation(new GlobalAggregationBuilder("global")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(9L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(9L));
             }
         );
         assertCacheState(client, "index", 2, 1);

@@ -364,7 +364,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(8L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(8L));
             }
         );
         assertCacheState(client, "index-1", 0, 1);

@@ -381,7 +381,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(8L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(8L));
             }
         );
         assertCacheState(client, "index-1", 1, 1);

@@ -395,7 +395,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
                 .setQuery(QueryBuilders.rangeQuery("d").gte("now-7d/d").lte("now")),
             response -> {
                 ElasticsearchAssertions.assertAllSuccessful(response);
-                assertThat(response.getHits().getTotalHits().value, equalTo(8L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(8L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index-1", 2, 1);
|
assertCacheState(client, "index-1", 2, 1);
|
||||||
|
@ -440,7 +440,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")),
|
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-19").lte("2016-03-25")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 0);
|
assertCacheState(client, "index", 0, 0);
|
||||||
|
@ -453,7 +453,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")),
|
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 0);
|
assertCacheState(client, "index", 0, 0);
|
||||||
|
@ -468,7 +468,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")),
|
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-20").lte("2016-03-26")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 0);
|
assertCacheState(client, "index", 0, 0);
|
||||||
|
@ -483,7 +483,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")),
|
.addAggregation(dateRange("foo").field("s").addRange("now-10y", "now")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 0);
|
assertCacheState(client, "index", 0, 0);
|
||||||
|
@ -497,7 +497,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")),
|
.setQuery(QueryBuilders.rangeQuery("s").gte("2016-03-21").lte("2016-03-27")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 2);
|
assertCacheState(client, "index", 0, 2);
|
||||||
|
@ -512,7 +512,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))),
|
.addAggregation(filter("foo", QueryBuilders.rangeQuery("s").from("now-10y").to("now"))),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(7L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(7L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 4);
|
assertCacheState(client, "index", 0, 4);
|
||||||
|
@ -543,7 +543,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")),
|
.setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 0, 1);
|
assertCacheState(client, "index", 0, 1);
|
||||||
|
@ -555,20 +555,20 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
.setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")),
|
.setQuery(QueryBuilders.rangeQuery("created_at").gte("now-7d/d")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
assertCacheState(client, "index", 1, 1);
|
assertCacheState(client, "index", 1, 1);
|
||||||
|
|
||||||
assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> {
|
assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
});
|
});
|
||||||
assertCacheState(client, "index", 1, 2);
|
assertCacheState(client, "index", 1, 2);
|
||||||
|
|
||||||
assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> {
|
assertResponse(client.prepareSearch("last_week").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(0), response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
});
|
});
|
||||||
assertCacheState(client, "index", 2, 2);
|
assertCacheState(client, "index", 2, 2);
|
||||||
}
|
}
|
||||||
|
@ -591,7 +591,7 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
|
||||||
client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")),
|
client.prepareSearch("index").setRequestCache(true).setProfile(profile).setQuery(QueryBuilders.termQuery("k", "hello")),
|
||||||
response -> {
|
response -> {
|
||||||
ElasticsearchAssertions.assertAllSuccessful(response);
|
ElasticsearchAssertions.assertAllSuccessful(response);
|
||||||
assertThat(response.getHits().getTotalHits().value, equalTo(1L));
|
assertThat(response.getHits().getTotalHits().value(), equalTo(1L));
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
if (profile == false) {
|
if (profile == false) {
|
||||||
|
|
|
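Every hunk above applies the same mechanical migration: in Lucene 10, org.apache.lucene.search.TotalHits is a record, so the former public field read totalHits.value becomes the generated accessor totalHits.value(). A minimal before/after sketch of the pattern (the helper class and method names are illustrative, not part of this commit):

    import org.apache.lucene.search.TotalHits;
    import org.elasticsearch.action.search.SearchResponse;

    final class TotalHitsMigrationSketch {
        // Hypothetical helper for illustration only.
        static long hitCount(SearchResponse response) {
            TotalHits totalHits = response.getHits().getTotalHits();
            // Lucene 9.x: long n = totalHits.value;   // public final field
            return totalHits.value();                  // Lucene 10: record accessor
        }
    }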
@@ -229,7 +229,7 @@ public class CloseWhileRelocatingShardsIT extends ESIntegTestCase {

         for (String index : acknowledgedCloses) {
             assertResponse(prepareSearch(index).setSize(0).setTrackTotalHits(true), response -> {
-                long docsCount = response.getHits().getTotalHits().value;
+                long docsCount = response.getHits().getTotalHits().value();
                 assertEquals(
                     "Expected "
                         + docsPerIndex.get(index)
@@ -344,7 +344,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
             prepareSearch().setSize((int) numberOfDocs).setQuery(matchAllQuery()).setTrackTotalHits(true).addSort("id", SortOrder.ASC),
             response -> {
                 logSearchResponse(numberOfShards, numberOfDocs, finalI, response);
-                iterationHitCount[finalI] = response.getHits().getTotalHits().value;
+                iterationHitCount[finalI] = response.getHits().getTotalHits().value();
                 if (iterationHitCount[finalI] != numberOfDocs) {
                     error[0] = true;
                 }
@@ -391,7 +391,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
         boolean[] errorOccurred = new boolean[1];
         for (int i = 0; i < iterations; i++) {
             assertResponse(prepareSearch().setTrackTotalHits(true).setSize(0).setQuery(matchAllQuery()), response -> {
-                if (response.getHits().getTotalHits().value != numberOfDocs) {
+                if (response.getHits().getTotalHits().value() != numberOfDocs) {
                     errorOccurred[0] = true;
                 }
             });
@@ -421,7 +421,7 @@ public class RecoveryWhileUnderLoadIT extends ESIntegTestCase {
             logger.info(
                 "iteration [{}] - returned documents: {} (expected {})",
                 iteration,
-                searchResponse.getHits().getTotalHits().value,
+                searchResponse.getHits().getTotalHits().value(),
                 numberOfDocs
             );
         }
@@ -240,7 +240,7 @@ public class RelocationIT extends ESIntegTestCase {
             prepareSearch("test").setQuery(matchAllQuery()).setSize((int) indexer.totalIndexedDocs()).storedFields(),
             response -> {
                 var hits = response.getHits();
-                if (hits.getTotalHits().value != indexer.totalIndexedDocs()) {
+                if (hits.getTotalHits().value() != indexer.totalIndexedDocs()) {
                     int[] hitIds = new int[(int) indexer.totalIndexedDocs()];
                     for (int hit = 0; hit < indexer.totalIndexedDocs(); hit++) {
                         hitIds[hit] = hit + 1;
@@ -254,7 +254,7 @@ public class RelocationIT extends ESIntegTestCase {
                     }
                     set.forEach(value -> logger.error("Missing id [{}]", value));
                 }
-                assertThat(hits.getTotalHits().value, equalTo(indexer.totalIndexedDocs()));
+                assertThat(hits.getTotalHits().value(), equalTo(indexer.totalIndexedDocs()));
                 logger.info("--> DONE search test round {}", idx + 1);
             }
         );
@@ -364,9 +364,9 @@ public class RelocationIT extends ESIntegTestCase {
         for (Client client : clients()) {
             assertNoFailuresAndResponse(client.prepareSearch("test").setPreference("_local").setSize(0), response -> {
                 if (expectedCount[0] < 0) {
-                    expectedCount[0] = response.getHits().getTotalHits().value;
+                    expectedCount[0] = response.getHits().getTotalHits().value();
                 } else {
-                    assertEquals(expectedCount[0], response.getHits().getTotalHits().value);
+                    assertEquals(expectedCount[0], response.getHits().getTotalHits().value());
                 }
             });
         }
@@ -296,7 +296,7 @@ public class AliasRoutingIT extends ESIntegTestCase {
             prepareSearch("index_*").setSearchType(SearchType.QUERY_THEN_FETCH).setSize(1).setQuery(QueryBuilders.matchAllQuery()),
             response -> {
                 logger.info("--> search all on index_* should find two");
-                assertThat(response.getHits().getTotalHits().value, equalTo(2L));
+                assertThat(response.getHits().getTotalHits().value(), equalTo(2L));
                 // Let's make sure that, even though 2 docs are available, only one is returned according to the size we set in the request
                 // Therefore the reduce phase has taken place, which proves that the QUERY_AND_FETCH search type wasn't erroneously forced.
                 assertThat(response.getHits().getHits().length, equalTo(1));
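The record conversion also covers the relation flag on TotalHits: tests that assert an exact count implicitly rely on relation() being EQUAL_TO, since value() is only a lower bound otherwise. A small defensive sketch of that check (hypothetical helper, not part of this diff):

    import org.apache.lucene.search.TotalHits;

    final class ExactTotalHits {
        // Hypothetical guard for illustration only.
        static long exactValue(TotalHits totalHits) {
            // With setTrackTotalHits(true) the relation is EQUAL_TO; otherwise
            // value() may be a lower bound (GREATER_THAN_OR_EQUAL_TO).
            if (totalHits.relation() != TotalHits.Relation.EQUAL_TO) {
                throw new AssertionError("total hits is a lower bound: " + totalHits.value());
            }
            return totalHits.value();
        }
    }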
Some files were not shown because too many files have changed in this diff.