Upgrade Lucene to 9.1.0-snapshot-949752 (#84540)

This PR upgrades Lucene to a newer snapshot `9.1.0-snapshot-9497524cc2d`.

Changes:
* Adapt to `LeafReader#searchNearestVectors` signature change
* Adapt checks in `GeometryIndexerTests`, `SearchServiceTests`, `FiltersAggregatorTests`, `AggregationProfilerIT`
* Address highlighting failures in `MultiPhrasePrefixQuery` and `HasChildQueryBuilder.LateParsingQuery`
This commit is contained in:
Julie Tibshirani 2022-03-04 08:36:26 -08:00 committed by GitHub
parent d831c50178
commit bba2dfac56
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
61 changed files with 123 additions and 49 deletions

View file

@ -1,5 +1,5 @@
elasticsearch = 8.2.0
lucene = 9.1.0-snapshot-1336263051c
lucene = 9.1.0-snapshot-9497524cc2d
bundled_jdk_vendor = adoptium
bundled_jdk = 17.0.2+8

View file

@ -1 +0,0 @@
d7d416323284b166af4eb020987f4abdd9433344

View file

@ -0,0 +1 @@
fb9c3a9c5e21880bc9add56283f1a78be6000021

View file

@ -1 +0,0 @@
2bc7d3e319fd7d7e4f43ab1f559a0efc4881545d

View file

@ -0,0 +1 @@
c50b37f391320adb572fbd3109953d36eddffb88

View file

@ -22,6 +22,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.NoRewriteMatchNoDocsQuery;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetOrdinalsIndexFieldData;
@ -425,7 +426,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
// blow up since for this query to work we have to have a DirectoryReader otherwise
// we can't load global ordinals - for this to work we simply check if the reader has no leaves
// and rewrite to match nothing
return new MatchNoDocsQuery();
return new NoRewriteMatchNoDocsQuery("Can't load against an empty reader");
}
throw new IllegalStateException(
"can't load global ordinals for reader of type: " + reader.getClass() + " must be a DirectoryReader"

View file

@ -1 +0,0 @@
0a60981b470cb004c8a1fa6264d7714fa7ba6acb

View file

@ -0,0 +1 @@
5e3f60736f5fa8e665c906326b659d4246c15c49

View file

@ -1 +0,0 @@
be1ff8e03e20d89f0d086db15a0478d9b5722972

View file

@ -0,0 +1 @@
1d7baaa61a5896a505ae0f493978495cfbfe04d2

View file

@ -1 +0,0 @@
fb2cc60f82827d83b59dca218f97cdc746de1ffe

View file

@ -0,0 +1 @@
52a7ed9945a7d7f0ad316992494610af3d54451a

View file

@ -1 +0,0 @@
23dcc8fdfeddd13411e470c0af643b45a2ef71c4

View file

@ -0,0 +1 @@
75f996d3ba1f8c9f8c0100e4bc7d768fca6b2ce3

View file

@ -1 +0,0 @@
be6517c82fb00699991363a785f1b63650493a01

View file

@ -0,0 +1 @@
aa03db102c7a06f82845258e9bee775e5dfd0d29

View file

@ -1 +0,0 @@
6a73d215b184c7b05d2cef06d342166bc4436414

View file

@ -0,0 +1 @@
efe5d28db9e2dcecf3d79162229634807e60550c

View file

@ -1 +0,0 @@
8d880b5ec86f40c9c8c23bc3c07ee1ae17f74354

View file

@ -0,0 +1 @@
7ea5e4e5e41e02aa3b484da4519780a4e048f62b

View file

@ -1 +0,0 @@
185d5cf3bbe7af1bf3b6577f69d4648b7f21fb08

View file

@ -0,0 +1 @@
41a3133a82ebd1414c76895e5fcd9c2bd93cf878

View file

@ -1 +0,0 @@
dd944abcbf701063d823d71f8acb42070c70369f

View file

@ -0,0 +1 @@
e6206973ba2125c2a15c7ef4da87a5551aa206bc

View file

@ -1 +0,0 @@
b40ede13eebf417f2ffb07a1b1594eab92d7df97

View file

@ -0,0 +1 @@
b9800da72fab21cc2a5bfcef8ae4a68bd91dcc9e

View file

@ -1 +0,0 @@
65e470be45afb0109a9b128516c115fb52340035

View file

@ -0,0 +1 @@
3afaf888c47bed46dc1d3fb50d1633b45dc3b76c

View file

@ -1 +0,0 @@
34cae1ec8b65df516176c9086bbb893ae7706be8

View file

@ -0,0 +1 @@
eb37f9294eb9b6f2dcc71d74018d31740217cc26

View file

@ -1 +0,0 @@
3dc91793e4a0eca6c5073e5c12c6cf45e7dea5ff

View file

@ -0,0 +1 @@
b102f579d4a5c1c7ab3c6e81ca0d0208b90ccd16

View file

@ -1 +0,0 @@
141bc2714d21b4c986ef49b379cb7771051be425

View file

@ -0,0 +1 @@
e5f85835c61765da66d8ede6f902694e67557480

View file

@ -1 +0,0 @@
e3253aee779b7e3c7e55efad2a91cd3d6902a6b3

View file

@ -0,0 +1 @@
07c36e80700323c634d4b85b4f7aea2c58a69ea0

View file

@ -1 +0,0 @@
644df6a83310f5eb02bf8bf4069edf0f72fd8c32

View file

@ -0,0 +1 @@
9a9f1f2efe52bc576738d5f247e81aa5800fe71a

View file

@ -1 +0,0 @@
593f6ca05697bce8d755ac32679a8a93b74b64cd

View file

@ -0,0 +1 @@
60dac7d1cee06b6340db408754de40a403705be1

View file

@ -1 +0,0 @@
59adf31a24a4b2f5c32be3946e77760ca068361a

View file

@ -0,0 +1 @@
64f40470e3ad31651738043dc28d59306ec49f98

View file

@ -1 +0,0 @@
1b3ad2c10859f570df8d0fea7c79fdb66a82899a

View file

@ -0,0 +1 @@
1be2f3bc523c8541a591e9fe2595584222f6400b

View file

@ -1 +0,0 @@
8972c5c823990684aacb136ae33d3cc35709488c

View file

@ -0,0 +1 @@
019ec3e0e49d4db2ce28da027ab52f732966367d

View file

@ -697,9 +697,9 @@ public class AggregationProfilerIT extends ESIntegTestCase {
.entry(
"filters",
matchesList().item(
matchesMap().entry("query", "DocValuesFieldExistsQuery [field=date]")
.entry("specialized_for", "docvalues_field_exists")
matchesMap().entry("query", "*:*")
.entry("results_from_metadata", 0)
.entry("specialized_for", "match_all")
)
)
)

View file

@ -1034,12 +1034,12 @@ public class Lucene {
}
@Override
public VectorValues getVectorValues(String field) throws IOException {
public VectorValues getVectorValues(String field) {
return null;
}
@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException {
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) {
return null;
}

View file

@ -159,11 +159,12 @@ public class MultiPhrasePrefixQuery extends Query {
return Queries.newMatchNoDocsQuery("No terms supplied for " + MultiPhrasePrefixQuery.class.getName());
}
// if the terms does not exist we could return a MatchNoDocsQuery but this would break the unified highlighter
// which rewrites query with an empty reader.
// Hack so that the Unified Highlighter can still extract the original terms from this query
// after rewriting, even though it would normally become a MatchNoDocsQuery against an empty
// index
return new BooleanQuery.Builder().add(query.build(), BooleanClause.Occur.MUST)
.add(
Queries.newMatchNoDocsQuery("No terms supplied for " + MultiPhrasePrefixQuery.class.getName()),
new NoRewriteMatchNoDocsQuery("No terms supplied for " + MultiPhrasePrefixQuery.class.getName()),
BooleanClause.Occur.MUST
)
.build();

View file

@ -0,0 +1,75 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.common.lucene.search;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryVisitor;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import java.io.IOException;
import java.util.Objects;
/**
 * A MatchNoDocsQuery that will not be recognised by Lucene's core rewriting rules,
 * useful for protecting the unified highlighter against rewriting. See
 * https://issues.apache.org/jira/browse/LUCENE-10454 for an explanation.
 *
 * <p>Unlike Lucene's own {@code MatchNoDocsQuery}, this is a plain {@link Query}
 * subclass that does not override {@code rewrite}, so rewriting leaves it (and any
 * query wrapping it) structurally intact.
 */
public class NoRewriteMatchNoDocsQuery extends Query {
    // Human-readable explanation of why nothing matches; surfaced via explain()
    // and toString(), and part of the equals()/hashCode() identity.
    private final String reason;
    public NoRewriteMatchNoDocsQuery(String reason) {
        this.reason = reason;
    }
    @Override
    public Weight createWeight(IndexSearcher searcher, ScoreMode scoreMode, float boost) throws IOException {
        return new Weight(this) {
            @Override
            public Explanation explain(LeafReaderContext context, int doc) {
                // Every document is a non-match; report the configured reason.
                return Explanation.noMatch(reason);
            }
            @Override
            public Scorer scorer(LeafReaderContext context) {
                // A null Scorer means "no matching documents" for this segment.
                return null;
            }
            @Override
            public boolean isCacheable(LeafReaderContext ctx) {
                // Matches nothing regardless of segment contents, so always cacheable.
                return true;
            }
        };
    }
    @Override
    public String toString(String field) {
        return "NoRewriteMatchNoDocsQuery(" + reason + ")";
    }
    @Override
    public void visit(QueryVisitor visitor) {
        // Intentionally empty: the query matches no documents and exposes no terms.
    }
    @Override
    public boolean equals(Object obj) {
        // Two instances are equal iff they carry the same reason string.
        return obj instanceof NoRewriteMatchNoDocsQuery q && Objects.equals(this.reason, q.reason);
    }
    @Override
    public int hashCode() {
        return Objects.hashCode(reason);
    }
}

View file

@ -351,8 +351,8 @@ final class TranslogDirectoryReader extends DirectoryReader {
}
@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException {
return getDelegate().searchNearestVectors(field, target, k, acceptDocs);
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException {
return getDelegate().searchNearestVectors(field, target, k, acceptDocs, visitedLimit);
}
@Override

View file

@ -196,7 +196,7 @@ class DocumentLeafReader extends LeafReader {
}
@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException {
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) {
throw new UnsupportedOperationException();
}

View file

@ -196,8 +196,8 @@ public class FieldUsageTrackingDirectoryReader extends FilterDirectoryReader {
}
@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException {
TopDocs topDocs = super.searchNearestVectors(field, target, k, acceptDocs);
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException {
TopDocs topDocs = super.searchNearestVectors(field, target, k, acceptDocs, visitedLimit);
if (topDocs != null) {
notifier.onKnnVectorsUsed(field);
}

View file

@ -226,7 +226,7 @@ public class GeometryIndexerTests extends ESTestCase {
public void testPolygonAllCollinearPoints() {
Polygon polygon = new Polygon(new LinearRing(new double[] { 0, 1, -1, 0 }, new double[] { 0, 1, -1, 0 }));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexer.indexShape(polygon));
assertEquals("Unable to Tessellate shape. Possible malformed shape detected.", e.getMessage());
assertEquals("at least three non-collinear points required", e.getMessage());
}
private XContentBuilder polygon(Boolean orientation, double... val) throws IOException {

View file

@ -12,7 +12,6 @@ import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.FilterDirectoryReader;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.ElasticsearchException;
@ -42,7 +41,6 @@ import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.AbstractRefCounted;
import org.elasticsearch.core.TimeValue;
@ -1138,10 +1136,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
mapping.endObject().endObject();
createIndex("test", Settings.EMPTY, mapping);
withAggregationContext(
"test",
context -> assertThat(context.query(), equalTo(new ConstantScoreQuery(Queries.newNonNestedFilter())))
);
withAggregationContext("test", context -> assertThat(context.query(), equalTo(new MatchAllDocsQuery())));
}
/**

View file

@ -1296,6 +1296,8 @@ public class FiltersAggregatorTests extends AggregatorTestCase {
for (int i = 0; i < 10; i++) {
iw.addDocument(buildDocWithField.apply(i));
}
// Create a document without the field to prevent DocValuesFieldExistsQuery from being rewritten to MatchAll
iw.addDocument(List.of());
}, (InternalFilters result, Class<? extends Aggregator> impl, Map<String, Map<String, Object>> debug) -> {
assertThat(result.getBuckets(), hasSize(1));
assertThat(result.getBucketByKey("q1").getDocCount(), equalTo(10L));

View file

@ -217,7 +217,7 @@ final class RewriteCachingDirectoryReader extends DirectoryReader {
}
@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException {
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException {
throw new UnsupportedOperationException();
}

View file

@ -283,8 +283,8 @@ public final class FieldSubsetReader extends SequentialStoredFieldsLeafReader {
}
@Override
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs) throws IOException {
return hasField(field) ? super.searchNearestVectors(field, target, k, acceptDocs) : null;
public TopDocs searchNearestVectors(String field, float[] target, int k, Bits acceptDocs, int visitedLimit) throws IOException {
return hasField(field) ? super.searchNearestVectors(field, target, k, acceptDocs, visitedLimit) : null;
}
// we share core cache keys (for e.g. fielddata)

View file

@ -203,13 +203,13 @@ public class FieldSubsetReaderTests extends ESTestCase {
assertEquals(0, vectorValues.nextDoc());
assertNotNull(vectorValues.binaryValue());
TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null);
TopDocs topDocs = leafReader.searchNearestVectors("fieldA", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE);
assertNotNull(topDocs);
assertEquals(1, topDocs.scoreDocs.length);
// Check that we can't see fieldB
assertNull(leafReader.getVectorValues("fieldB"));
assertNull(leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null));
assertNull(leafReader.searchNearestVectors("fieldB", new float[] { 1.0f, 1.0f, 1.0f }, 5, null, Integer.MAX_VALUE));
TestUtil.checkReader(ir);
IOUtils.close(ir, iw, dir);

View file

@ -1 +0,0 @@
0a60981b470cb004c8a1fa6264d7714fa7ba6acb

View file

@ -0,0 +1 @@
5e3f60736f5fa8e665c906326b659d4246c15c49