Upgrade to a Lucene 7 snapshot (#24089)

We want to upgrade to Lucene 7 ahead of time in order to be able to check whether it causes any trouble to Elasticsearch before Lucene 7.0 gets released. From a user perspective, the main benefit of this upgrade is the enhanced support for sparse fields, whose resource consumption is now a function of the number of docs that have a value rather than the total number of docs in the index.

Some notes about the change:
 - it includes the deprecation of the `disable_coord` parameter of the `bool` and `common_terms` queries: Lucene has removed support for coord factors
 - it includes the deprecation of the `index.similarity.base` expert setting, since it was only useful to configure coords and query norms, which have both been removed
 - two tests have been marked with `@AwaitsFix` because of #23966, which we intend to address after the merge
This commit is contained in:
Adrien Grand 2017-04-18 15:17:21 +02:00 committed by GitHub
parent f217eb8ad8
commit 4632661bc7
269 changed files with 3993 additions and 3868 deletions

View file

@ -26,12 +26,6 @@ java.util.concurrent.ThreadLocalRandom
java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests
@defaultMessage this should not have been added to lucene in the first place
org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey()
@defaultMessage Soon to be removed
org.apache.lucene.document.FieldType#numericType()
@defaultMessage Don't use MethodHandles in slow ways, don't be lenient in tests. @defaultMessage Don't use MethodHandles in slow ways, don't be lenient in tests.
java.lang.invoke.MethodHandle#invoke(java.lang.Object[]) java.lang.invoke.MethodHandle#invoke(java.lang.Object[])
java.lang.invoke.MethodHandle#invokeWithArguments(java.lang.Object[]) java.lang.invoke.MethodHandle#invokeWithArguments(java.lang.Object[])

View file

@ -36,16 +36,6 @@ org.apache.lucene.index.IndexReader#decRef()
org.apache.lucene.index.IndexReader#incRef() org.apache.lucene.index.IndexReader#incRef()
org.apache.lucene.index.IndexReader#tryIncRef() org.apache.lucene.index.IndexReader#tryIncRef()
@defaultMessage Close listeners can only installed via ElasticsearchDirectoryReader#addReaderCloseListener
org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener)
@defaultMessage Pass the precision step from the mappings explicitly instead
org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean)
org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean)
org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean)
org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean)
@defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead. @defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead.
java.lang.Object#wait() java.lang.Object#wait()
java.lang.Object#wait(long) java.lang.Object#wait(long)

View file

@ -1,6 +1,6 @@
# When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy # When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy
elasticsearch = 6.0.0-alpha1 elasticsearch = 6.0.0-alpha1
lucene = 6.5.0 lucene = 7.0.0-snapshot-89f6d17
# optional dependencies # optional dependencies
spatial4j = 0.6 spatial4j = 0.6

View file

@ -1 +0,0 @@
3989779b05ecd0ace6affe19223b1c27156604f1

View file

@ -0,0 +1 @@
e69234c2e898d86a53edbe8d22e33bebc45286cd

View file

@ -1 +0,0 @@
6a8660e7133f357ef40d9cac26316ccd9937a2eb

View file

@ -0,0 +1 @@
48172a8e1fe6562f55ab671d42af53652794d5df

View file

@ -1 +0,0 @@
ff176c9bde4228b43827849f5d2ff2e2717e3297

View file

@ -0,0 +1 @@
3dab251d4c7ab4ff5095e5f1d1e127ec2cf3c07d

View file

@ -1 +0,0 @@
10d2e5b36f460527ac9b948be0ec3077bde5b0ca

View file

@ -0,0 +1 @@
c01ae8a23b733d75d058a76bd85fcb49b9fd06fd

View file

@ -1 +0,0 @@
0019bb6a631ea0123e8e553b0510fa81c9d3c3eb

View file

@ -0,0 +1 @@
c53df048b97946fe66035505306b5651b702adb1

View file

@ -1 +0,0 @@
dad85baba266793b9ceb80a9b08c4ee9838e09df

View file

@ -0,0 +1 @@
1ecb349ba29abab75359e5125ac8a94fc81441d5

View file

@ -1 +0,0 @@
938f9f7efe8a403fd57c99aedd75d040d9caa896

View file

@ -0,0 +1 @@
e5f53b38652b1284ff254fba39e624ec117aef7d

View file

@ -1 +0,0 @@
afdff39ecb30f6e2c6f056a5bdfcb13d928a25af

View file

@ -0,0 +1 @@
2f340ed3f46d6b4c89fa31975b675c19028c15eb

View file

@ -1 +0,0 @@
8e3971a008070712d57b59cf1f7b44c0d9d3df25

View file

@ -0,0 +1 @@
a13862fb62cc1e516d16d6b6bb3cdb906c4925f6

View file

@ -1 +0,0 @@
225b904edf91ccdffffa398e1924ebadd5677c09

View file

@ -0,0 +1 @@
4e014f72a588453bae7dd1a555d741cf3bf39032

View file

@ -1 +0,0 @@
5c994fc5dc4f37133a861571211303d81c5d51ff

View file

@ -0,0 +1 @@
5e87d61c604d6b1c0ee5c38f09441d1b8b9c8c2b

View file

@ -1 +0,0 @@
553b7b13bef994f14076a85557df03cad67322e9

View file

@ -0,0 +1 @@
be14aa163b339403d8ec904493c1be5dfa9baeaf

View file

@ -1 +0,0 @@
73deae791d861820974600705ba06e9f801cbe56

View file

@ -0,0 +1 @@
a2c13be0fe4c5a98a30ec6ae673be1442409817c

View file

@ -1 +0,0 @@
c2aad69500dac79338ef45f570cab47bec3d2724

View file

@ -0,0 +1 @@
92b8282e474845fdae31f9f239f953bc7164401f

View file

@ -1 +0,0 @@
acf211f2bf901dfc8155a46c5a42c5650edf74ef

View file

@ -0,0 +1 @@
1c4aaea267ed41657ebf01769bfddbcab5b27414

View file

@ -296,16 +296,15 @@ public abstract class BlendedTermQuery extends Query {
return Objects.hash(classHash(), Arrays.hashCode(equalsTerms())); return Objects.hash(classHash(), Arrays.hashCode(equalsTerms()));
} }
public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final boolean disableCoord) { public static BlendedTermQuery booleanBlendedQuery(Term[] terms) {
return booleanBlendedQuery(terms, null, disableCoord); return booleanBlendedQuery(terms, null);
} }
public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts, final boolean disableCoord) { public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts) {
return new BlendedTermQuery(terms, boosts) { return new BlendedTermQuery(terms, boosts) {
@Override @Override
protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
booleanQueryBuilder.setDisableCoord(disableCoord);
for (int i = 0; i < terms.length; i++) { for (int i = 0; i < terms.length; i++) {
Query query = new TermQuery(terms[i], ctx[i]); Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) { if (boosts != null && boosts[i] != 1f) {
@ -318,14 +317,12 @@ public abstract class BlendedTermQuery extends Query {
}; };
} }
public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final boolean disableCoord, final float maxTermFrequency) { public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final float maxTermFrequency) {
return new BlendedTermQuery(terms, boosts) { return new BlendedTermQuery(terms, boosts) {
@Override @Override
protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
BooleanQuery.Builder highBuilder = new BooleanQuery.Builder(); BooleanQuery.Builder highBuilder = new BooleanQuery.Builder();
highBuilder.setDisableCoord(disableCoord);
BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder(); BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder();
lowBuilder.setDisableCoord(disableCoord);
for (int i = 0; i < terms.length; i++) { for (int i = 0; i < terms.length; i++) {
Query query = new TermQuery(terms[i], ctx[i]); Query query = new TermQuery(terms[i], ctx[i]);
if (boosts != null && boosts[i] != 1f) { if (boosts != null && boosts[i] != 1f) {
@ -343,7 +340,6 @@ public abstract class BlendedTermQuery extends Query {
BooleanQuery low = lowBuilder.build(); BooleanQuery low = lowBuilder.build();
if (low.clauses().isEmpty()) { if (low.clauses().isEmpty()) {
BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder(); BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
queryBuilder.setDisableCoord(disableCoord);
for (BooleanClause booleanClause : high) { for (BooleanClause booleanClause : high) {
queryBuilder.add(booleanClause.getQuery(), Occur.MUST); queryBuilder.add(booleanClause.getQuery(), Occur.MUST);
} }
@ -352,7 +348,6 @@ public abstract class BlendedTermQuery extends Query {
return low; return low;
} else { } else {
return new BooleanQuery.Builder() return new BooleanQuery.Builder()
.setDisableCoord(true)
.add(high, BooleanClause.Occur.SHOULD) .add(high, BooleanClause.Occur.SHOULD)
.add(low, BooleanClause.Occur.MUST) .add(low, BooleanClause.Occur.MUST)
.build(); .build();

View file

@ -35,8 +35,8 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery {
private final MappedFieldType fieldType; private final MappedFieldType fieldType;
public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, boolean disableCoord, MappedFieldType fieldType) { public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) {
super(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord); super(highFreqOccur, lowFreqOccur, maxTermFrequency);
this.fieldType = fieldType; this.fieldType = fieldType;
} }

View file

@ -57,8 +57,8 @@ public final class MinDocQuery extends Query {
} }
@Override @Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
return new ConstantScoreWeight(this) { return new ConstantScoreWeight(this, boost) {
@Override @Override
public Scorer scorer(LeafReaderContext context) throws IOException { public Scorer scorer(LeafReaderContext context) throws IOException {
final int maxDoc = context.reader().maxDoc(); final int maxDoc = context.reader().maxDoc();

View file

@ -25,9 +25,7 @@ import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.queryparser.analyzing.AnalyzingQueryParser;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.DisjunctionMaxQuery;
import org.apache.lucene.search.FuzzyQuery; import org.apache.lucene.search.FuzzyQuery;
@ -70,7 +68,7 @@ import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfN
* Also breaks fields with [type].[name] into a boolean query that must include the type * Also breaks fields with [type].[name] into a boolean query that must include the type
* as well as the query on the name. * as well as the query on the name.
*/ */
public class MapperQueryParser extends AnalyzingQueryParser { public class MapperQueryParser extends QueryParser {
public static final Map<String, FieldQueryExtension> FIELD_QUERY_EXTENSIONS; public static final Map<String, FieldQueryExtension> FIELD_QUERY_EXTENSIONS;
@ -103,14 +101,13 @@ public class MapperQueryParser extends AnalyzingQueryParser {
setAnalyzer(settings.analyzer()); setAnalyzer(settings.analyzer());
setMultiTermRewriteMethod(settings.rewriteMethod()); setMultiTermRewriteMethod(settings.rewriteMethod());
setEnablePositionIncrements(settings.enablePositionIncrements()); setEnablePositionIncrements(settings.enablePositionIncrements());
setSplitOnWhitespace(settings.splitOnWhitespace());
setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries()); setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries());
setMaxDeterminizedStates(settings.maxDeterminizedStates()); setMaxDeterminizedStates(settings.maxDeterminizedStates());
setAllowLeadingWildcard(settings.allowLeadingWildcard()); setAllowLeadingWildcard(settings.allowLeadingWildcard());
setLowercaseExpandedTerms(false);
setPhraseSlop(settings.phraseSlop()); setPhraseSlop(settings.phraseSlop());
setDefaultOperator(settings.defaultOperator()); setDefaultOperator(settings.defaultOperator());
setFuzzyPrefixLength(settings.fuzzyPrefixLength()); setFuzzyPrefixLength(settings.fuzzyPrefixLength());
setSplitOnWhitespace(settings.splitOnWhitespace());
} }
/** /**
@ -175,7 +172,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
if (clauses.isEmpty()) return null; // happens for stopwords if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} else { } else {
return getFieldQuerySingle(field, queryText, quoted); return getFieldQuerySingle(field, queryText, quoted);
@ -277,7 +274,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
if (clauses.isEmpty()) return null; // happens for stopwords if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} else { } else {
return super.getFieldQuery(field, queryText, slop); return super.getFieldQuery(field, queryText, slop);
@ -328,7 +325,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
if (clauses.isEmpty()) return null; // happens for stopwords if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} }
@ -386,7 +383,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD));
} }
} }
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} else { } else {
return getFuzzyQuerySingle(field, termStr, minSimilarity); return getFuzzyQuerySingle(field, termStr, minSimilarity);
@ -450,7 +447,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
if (clauses.isEmpty()) return null; // happens for stopwords if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} else { } else {
return getPrefixQuerySingle(field, termStr); return getPrefixQuerySingle(field, termStr);
@ -559,7 +556,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token), innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token),
BooleanClause.Occur.SHOULD)); BooleanClause.Occur.SHOULD));
} }
posQuery = getBooleanQueryCoordDisabled(innerClauses); posQuery = getBooleanQuery(innerClauses);
} }
clauses.add(new BooleanClause(posQuery, clauses.add(new BooleanClause(posQuery,
getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD)); getDefaultOperator() == Operator.AND ? BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD));
@ -612,7 +609,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
if (clauses.isEmpty()) return null; // happens for stopwords if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} else { } else {
return getWildcardQuerySingle(field, termStr); return getWildcardQuerySingle(field, termStr);
@ -676,7 +673,7 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
if (clauses.isEmpty()) return null; // happens for stopwords if (clauses.isEmpty()) return null; // happens for stopwords
return getBooleanQueryCoordDisabled(clauses); return getBooleanQuery(clauses);
} }
} else { } else {
return getRegexpQuerySingle(field, termStr); return getRegexpQuerySingle(field, termStr);
@ -713,19 +710,6 @@ public class MapperQueryParser extends AnalyzingQueryParser {
} }
} }
/**
* @deprecated review all use of this, don't rely on coord
*/
@Deprecated
protected Query getBooleanQueryCoordDisabled(List<BooleanClause> clauses) throws ParseException {
BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(true);
for (BooleanClause clause : clauses) {
builder.add(clause);
}
return fixNegativeQueryIfNeeded(builder.build());
}
@Override @Override
protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException { protected Query getBooleanQuery(List<BooleanClause> clauses) throws ParseException {

View file

@ -22,31 +22,32 @@ import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.AbstractNumericDocValues;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import java.io.IOException; import java.io.IOException;
import java.util.Collection;
/** /**
* Utility class that ensures that a single collapse key is extracted per document. * Utility class that ensures that a single collapse key is extracted per document.
*/ */
abstract class CollapsingDocValuesSource<T> { abstract class CollapsingDocValuesSource<T> extends GroupSelector<T> {
protected final String field; protected final String field;
CollapsingDocValuesSource(String field) throws IOException { CollapsingDocValuesSource(String field) throws IOException {
this.field = field; this.field = field;
} }
abstract T get(int doc); @Override
public void setGroups(Collection<SearchGroup<T>> groups) {
abstract T copy(T value, T reuse); throw new UnsupportedOperationException();
}
abstract void setNextReader(LeafReader reader) throws IOException;
/** /**
* Implementation for {@link NumericDocValues} and {@link SortedNumericDocValues}. * Implementation for {@link NumericDocValues} and {@link SortedNumericDocValues}.
@ -54,35 +55,43 @@ abstract class CollapsingDocValuesSource<T> {
*/ */
static class Numeric extends CollapsingDocValuesSource<Long> { static class Numeric extends CollapsingDocValuesSource<Long> {
private NumericDocValues values; private NumericDocValues values;
private Bits docsWithField; private long value;
private boolean hasValue;
Numeric(String field) throws IOException { Numeric(String field) throws IOException {
super(field); super(field);
} }
@Override @Override
public Long get(int doc) { public State advanceTo(int doc) throws IOException {
if (docsWithField.get(doc)) { if (values.advanceExact(doc)) {
return values.get(doc); hasValue = true;
value = values.longValue();
return State.ACCEPT;
} else { } else {
return null; hasValue = false;
return State.SKIP;
} }
} }
@Override @Override
public Long copy(Long value, Long reuse) { public Long currentValue() {
return value; return hasValue ? value : null;
} }
@Override @Override
public void setNextReader(LeafReader reader) throws IOException { public Long copyValue() {
return currentValue();
}
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();
DocValuesType type = getDocValuesType(reader, field); DocValuesType type = getDocValuesType(reader, field);
if (type == null || type == DocValuesType.NONE) { if (type == null || type == DocValuesType.NONE) {
values = DocValues.emptyNumeric(); values = DocValues.emptyNumeric();
docsWithField = new Bits.MatchNoBits(reader.maxDoc());
return ; return ;
} }
docsWithField = DocValues.getDocsWithField(reader, field);
switch (type) { switch (type) {
case NUMERIC: case NUMERIC:
values = DocValues.getNumeric(reader, field); values = DocValues.getNumeric(reader, field);
@ -92,17 +101,34 @@ abstract class CollapsingDocValuesSource<T> {
final SortedNumericDocValues sorted = DocValues.getSortedNumeric(reader, field); final SortedNumericDocValues sorted = DocValues.getSortedNumeric(reader, field);
values = DocValues.unwrapSingleton(sorted); values = DocValues.unwrapSingleton(sorted);
if (values == null) { if (values == null) {
values = new NumericDocValues() { values = new AbstractNumericDocValues() {
private long value;
@Override @Override
public long get(int docID) { public boolean advanceExact(int target) throws IOException {
sorted.setDocument(docID); if (sorted.advanceExact(target)) {
assert sorted.count() > 0; if (sorted.docValueCount() > 1) {
if (sorted.count() > 1) { throw new IllegalStateException("failed to collapse " + target +
throw new IllegalStateException("failed to collapse " + docID + ", the collapse field must be single valued");
", the collapse field must be single valued"); }
value = sorted.nextValue();
return true;
} else {
return false;
} }
return sorted.valueAt(0);
} }
@Override
public int docID() {
return sorted.docID();
}
@Override
public long longValue() throws IOException {
return value;
}
}; };
} }
break; break;
@ -119,47 +145,56 @@ abstract class CollapsingDocValuesSource<T> {
* Fails with an {@link IllegalStateException} if a document contains multiple values for the specified field. * Fails with an {@link IllegalStateException} if a document contains multiple values for the specified field.
*/ */
static class Keyword extends CollapsingDocValuesSource<BytesRef> { static class Keyword extends CollapsingDocValuesSource<BytesRef> {
private Bits docsWithField;
private SortedDocValues values; private SortedDocValues values;
private int ord;
Keyword(String field) throws IOException { Keyword(String field) throws IOException {
super(field); super(field);
} }
@Override @Override
public BytesRef get(int doc) { public org.apache.lucene.search.grouping.GroupSelector.State advanceTo(int doc)
if (docsWithField.get(doc)) { throws IOException {
return values.get(doc); if (values.advanceExact(doc)) {
ord = values.ordValue();
return State.ACCEPT;
} else { } else {
return null; ord = -1;
return State.SKIP;
} }
} }
@Override @Override
public BytesRef copy(BytesRef value, BytesRef reuse) { public BytesRef currentValue() {
if (ord == -1) {
return null;
} else {
try {
return values.lookupOrd(ord);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public BytesRef copyValue() {
BytesRef value = currentValue();
if (value == null) { if (value == null) {
return null; return null;
}
if (reuse != null) {
reuse.bytes = ArrayUtil.grow(reuse.bytes, value.length);
reuse.offset = 0;
reuse.length = value.length;
System.arraycopy(value.bytes, value.offset, reuse.bytes, 0, value.length);
return reuse;
} else { } else {
return BytesRef.deepCopyOf(value); return BytesRef.deepCopyOf(value);
} }
} }
@Override @Override
public void setNextReader(LeafReader reader) throws IOException { public void setNextReader(LeafReaderContext readerContext) throws IOException {
LeafReader reader = readerContext.reader();
DocValuesType type = getDocValuesType(reader, field); DocValuesType type = getDocValuesType(reader, field);
if (type == null || type == DocValuesType.NONE) { if (type == null || type == DocValuesType.NONE) {
values = DocValues.emptySorted(); values = DocValues.emptySorted();
docsWithField = new Bits.MatchNoBits(reader.maxDoc());
return ; return ;
} }
docsWithField = DocValues.getDocsWithField(reader, field);
switch (type) { switch (type) {
case SORTED: case SORTED:
values = DocValues.getSorted(reader, field); values = DocValues.getSorted(reader, field);
@ -169,20 +204,36 @@ abstract class CollapsingDocValuesSource<T> {
final SortedSetDocValues sorted = DocValues.getSortedSet(reader, field); final SortedSetDocValues sorted = DocValues.getSortedSet(reader, field);
values = DocValues.unwrapSingleton(sorted); values = DocValues.unwrapSingleton(sorted);
if (values == null) { if (values == null) {
values = new SortedDocValues() { values = new AbstractSortedDocValues() {
private int ord;
@Override @Override
public int getOrd(int docID) { public boolean advanceExact(int target) throws IOException {
sorted.setDocument(docID); if (sorted.advanceExact(target)) {
int ord = (int) sorted.nextOrd(); ord = (int) sorted.nextOrd();
if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) {
throw new IllegalStateException("failed to collapse " + docID + throw new IllegalStateException("failed to collapse " + target +
", the collapse field must be single valued"); ", the collapse field must be single valued");
}
return true;
} else {
return false;
} }
}
@Override
public int docID() {
return sorted.docID();
}
@Override
public int ordValue() {
return ord; return ord;
} }
@Override @Override
public BytesRef lookupOrd(int ord) { public BytesRef lookupOrd(int ord) throws IOException {
return sorted.lookupOrd(ord); return sorted.lookupOrd(ord);
} }

View file

@ -18,13 +18,11 @@
*/ */
package org.apache.lucene.search.grouping; package org.apache.lucene.search.grouping;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Sort; import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BytesRef;
import java.io.IOException; import java.io.IOException;
import java.util.Collection; import java.util.Collection;
@ -37,7 +35,7 @@ import static org.apache.lucene.search.SortField.Type.SCORE;
* output. The collapsing is done in a single pass by selecting only the top sorted document per collapse key. * output. The collapsing is done in a single pass by selecting only the top sorted document per collapse key.
* The value used for the collapse key of each group can be found in {@link CollapseTopFieldDocs#collapseValues}. * The value used for the collapse key of each group can be found in {@link CollapseTopFieldDocs#collapseValues}.
*/ */
public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollector<T> { public final class CollapsingTopDocsCollector<T> extends FirstPassGroupingCollector<T> {
protected final String collapseField; protected final String collapseField;
protected final Sort sort; protected final Sort sort;
@ -47,9 +45,9 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
private float maxScore; private float maxScore;
private final boolean trackMaxScore; private final boolean trackMaxScore;
private CollapsingTopDocsCollector(String collapseField, Sort sort, CollapsingTopDocsCollector(GroupSelector<T> groupSelector, String collapseField, Sort sort,
int topN, boolean trackMaxScore) throws IOException { int topN, boolean trackMaxScore) throws IOException {
super(sort, topN); super(groupSelector, sort, topN);
this.collapseField = collapseField; this.collapseField = collapseField;
this.trackMaxScore = trackMaxScore; this.trackMaxScore = trackMaxScore;
if (trackMaxScore) { if (trackMaxScore) {
@ -65,7 +63,7 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
* {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can create the final top docs at the end * {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can create the final top docs at the end
* of the first pass. * of the first pass.
*/ */
public CollapseTopFieldDocs getTopDocs() { public CollapseTopFieldDocs getTopDocs() throws IOException {
Collection<SearchGroup<T>> groups = super.getTopGroups(0, true); Collection<SearchGroup<T>> groups = super.getTopGroups(0, true);
if (groups == null) { if (groups == null) {
return new CollapseTopFieldDocs(collapseField, totalHitCount, new ScoreDoc[0], return new CollapseTopFieldDocs(collapseField, totalHitCount, new ScoreDoc[0],
@ -121,57 +119,6 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
totalHitCount++; totalHitCount++;
} }
private static class Numeric extends CollapsingTopDocsCollector<Long> {
private final CollapsingDocValuesSource.Numeric source;
private Numeric(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException {
super(collapseField, sort, topN, trackMaxScore);
source = new CollapsingDocValuesSource.Numeric(collapseField);
}
@Override
protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
super.doSetNextReader(readerContext);
source.setNextReader(readerContext.reader());
}
@Override
protected Long getDocGroupValue(int doc) {
return source.get(doc);
}
@Override
protected Long copyDocGroupValue(Long groupValue, Long reuse) {
return source.copy(groupValue, reuse);
}
}
private static class Keyword extends CollapsingTopDocsCollector<BytesRef> {
private final CollapsingDocValuesSource.Keyword source;
private Keyword(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException {
super(collapseField, sort, topN, trackMaxScore);
source = new CollapsingDocValuesSource.Keyword(collapseField);
}
@Override
protected void doSetNextReader(LeafReaderContext readerContext) throws IOException {
super.doSetNextReader(readerContext);
source.setNextReader(readerContext.reader());
}
@Override
protected BytesRef getDocGroupValue(int doc) {
return source.get(doc);
}
@Override
protected BytesRef copyDocGroupValue(BytesRef groupValue, BytesRef reuse) {
return source.copy(groupValue, reuse);
}
}
/** /**
* Create a collapsing top docs collector on a {@link org.apache.lucene.index.NumericDocValues} field. * Create a collapsing top docs collector on a {@link org.apache.lucene.index.NumericDocValues} field.
* It accepts also {@link org.apache.lucene.index.SortedNumericDocValues} field but * It accepts also {@link org.apache.lucene.index.SortedNumericDocValues} field but
@ -189,7 +136,8 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
*/ */
public static CollapsingTopDocsCollector<?> createNumeric(String collapseField, Sort sort, public static CollapsingTopDocsCollector<?> createNumeric(String collapseField, Sort sort,
int topN, boolean trackMaxScore) throws IOException { int topN, boolean trackMaxScore) throws IOException {
return new Numeric(collapseField, sort, topN, trackMaxScore); return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Numeric(collapseField),
collapseField, sort, topN, trackMaxScore);
} }
/** /**
@ -208,7 +156,8 @@ public abstract class CollapsingTopDocsCollector<T> extends FirstPassGroupingCol
*/ */
public static CollapsingTopDocsCollector<?> createKeyword(String collapseField, Sort sort, public static CollapsingTopDocsCollector<?> createKeyword(String collapseField, Sort sort,
int topN, boolean trackMaxScore) throws IOException { int topN, boolean trackMaxScore) throws IOException {
return new Keyword(collapseField, sort, topN, trackMaxScore); return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Keyword(collapseField),
collapseField, sort, topN, trackMaxScore);
} }
} }

View file

@ -82,7 +82,7 @@ public class Version implements Comparable<Version> {
public static final Version V_5_5_0_UNRELEASED = new Version(V_5_5_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0); public static final Version V_5_5_0_UNRELEASED = new Version(V_5_5_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0);
public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001; public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001;
public static final Version V_6_0_0_alpha1_UNRELEASED = public static final Version V_6_0_0_alpha1_UNRELEASED =
new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0); new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_7_0_0);
public static final Version CURRENT = V_6_0_0_alpha1_UNRELEASED; public static final Version CURRENT = V_6_0_0_alpha1_UNRELEASED;
// unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT) // unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT)

View file

@ -18,13 +18,13 @@
*/ */
package org.elasticsearch.common.geo; package org.elasticsearch.common.geo;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.spatial.util.MortonEncoder;
import org.apache.lucene.util.BitUtil;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.util.BitUtil;
/** /**
* Utilities for converting to/from the GeoHash standard * Utilities for converting to/from the GeoHash standard
* *
@ -42,19 +42,35 @@ public class GeoHashUtils {
/** maximum precision for geohash strings */ /** maximum precision for geohash strings */
public static final int PRECISION = 12; public static final int PRECISION = 12;
private static final short MORTON_OFFSET = (GeoPointField.BITS<<1) - (PRECISION*5); /** number of bits used for quantizing latitude and longitude values */
public static final short BITS = 31;
/** scaling factors to convert lat/lon into unsigned space */
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
private static final short MORTON_OFFSET = (BITS<<1) - (PRECISION*5);
// No instance: // No instance:
private GeoHashUtils() { private GeoHashUtils() {
} }
/*************************
* 31 bit encoding utils *
*************************/
public static long encodeLatLon(final double lat, final double lon) {
long result = MortonEncoder.encode(lat, lon);
if (result == 0xFFFFFFFFFFFFFFFFL) {
return result & 0xC000000000000000L;
}
return result >>> 2;
}
/** /**
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level) * Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
*/ */
public static final long longEncode(final double lon, final double lat, final int level) { public static final long longEncode(final double lon, final double lat, final int level) {
// shift to appropriate level // shift to appropriate level
final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET); final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
return ((BitUtil.flipFlop(GeoPointField.encodeLatLon(lat, lon)) >>> msf) << 4) | level; return ((BitUtil.flipFlop(encodeLatLon(lat, lon)) >>> msf) << 4) | level;
} }
/** /**
@ -120,7 +136,7 @@ public class GeoHashUtils {
*/ */
public static final String stringEncode(final double lon, final double lat, final int level) { public static final String stringEncode(final double lon, final double lat, final int level) {
// convert to geohashlong // convert to geohashlong
final long ghLong = fromMorton(GeoPointField.encodeLatLon(lat, lon), level); final long ghLong = fromMorton(encodeLatLon(lat, lon), level);
return stringEncode(ghLong); return stringEncode(ghLong);
} }
@ -141,7 +157,7 @@ public class GeoHashUtils {
StringBuilder geoHash = new StringBuilder(); StringBuilder geoHash = new StringBuilder();
short precision = 0; short precision = 0;
final short msf = (GeoPointField.BITS<<1)-5; final short msf = (BITS<<1)-5;
long mask = 31L<<msf; long mask = 31L<<msf;
do { do {
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]); geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
@ -303,13 +319,31 @@ public class GeoHashUtils {
return neighbors; return neighbors;
} }
/** decode longitude value from morton encoded geo point */
public static final double decodeLongitude(final long hash) {
return unscaleLon(BitUtil.deinterleave(hash));
}
/** decode latitude value from morton encoded geo point */
public static final double decodeLatitude(final long hash) {
return unscaleLat(BitUtil.deinterleave(hash >>> 1));
}
private static double unscaleLon(final long val) {
return (val / LON_SCALE) - 180;
}
private static double unscaleLat(final long val) {
return (val / LAT_SCALE) - 90;
}
/** returns the latitude value from the string based geohash */ /** returns the latitude value from the string based geohash */
public static final double decodeLatitude(final String geohash) { public static final double decodeLatitude(final String geohash) {
return GeoPointField.decodeLatitude(mortonEncode(geohash)); return decodeLatitude(mortonEncode(geohash));
} }
/** returns the latitude value from the string based geohash */ /** returns the latitude value from the string based geohash */
public static final double decodeLongitude(final String geohash) { public static final double decodeLongitude(final String geohash) {
return GeoPointField.decodeLongitude(mortonEncode(geohash)); return decodeLongitude(mortonEncode(geohash));
} }
} }

View file

@ -23,7 +23,6 @@ import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -87,8 +86,8 @@ public final class GeoPoint {
} }
public GeoPoint resetFromIndexHash(long hash) { public GeoPoint resetFromIndexHash(long hash) {
lon = GeoPointField.decodeLongitude(hash); lon = GeoHashUtils.decodeLongitude(hash);
lat = GeoPointField.decodeLatitude(hash); lat = GeoHashUtils.decodeLatitude(hash);
return this; return this;
} }
@ -112,7 +111,7 @@ public final class GeoPoint {
public GeoPoint resetFromGeoHash(String geohash) { public GeoPoint resetFromGeoHash(String geohash) {
final long hash = mortonEncode(geohash); final long hash = mortonEncode(geohash);
return this.reset(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash)); return this.reset(GeoHashUtils.decodeLatitude(hash), GeoHashUtils.decodeLongitude(hash));
} }
public GeoPoint resetFromGeoHash(long geohashLong) { public GeoPoint resetFromGeoHash(long geohashLong) {

View file

@ -22,7 +22,6 @@ package org.elasticsearch.common.geo;
import org.apache.lucene.geo.Rectangle; import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.SloppyMath; import org.apache.lucene.util.SloppyMath;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.unit.DistanceUnit;
@ -511,35 +510,40 @@ public class GeoUtils {
final GeoPoint... fromPoints) { final GeoPoint... fromPoints) {
final GeoPointValues singleValues = FieldData.unwrapSingleton(geoPointValues); final GeoPointValues singleValues = FieldData.unwrapSingleton(geoPointValues);
if (singleValues != null && fromPoints.length == 1) { if (singleValues != null && fromPoints.length == 1) {
final Bits docsWithField = FieldData.unwrapSingletonBits(geoPointValues);
return FieldData.singleton(new NumericDoubleValues() { return FieldData.singleton(new NumericDoubleValues() {
@Override @Override
public double get(int docID) { public boolean advanceExact(int doc) throws IOException {
if (docsWithField != null && !docsWithField.get(docID)) { return singleValues.advanceExact(doc);
return 0d; }
}
final GeoPoint to = singleValues.get(docID); @Override
public double doubleValue() throws IOException {
final GeoPoint from = fromPoints[0]; final GeoPoint from = fromPoints[0];
final GeoPoint to = singleValues.geoPointValue();
return distance.calculate(from.lat(), from.lon(), to.lat(), to.lon(), unit); return distance.calculate(from.lat(), from.lon(), to.lat(), to.lon(), unit);
} }
}, docsWithField); });
} else { } else {
return new SortingNumericDoubleValues() { return new SortingNumericDoubleValues() {
@Override @Override
public void setDocument(int doc) { public boolean advanceExact(int target) throws IOException {
geoPointValues.setDocument(doc); if (geoPointValues.advanceExact(target)) {
resize(geoPointValues.count() * fromPoints.length); resize(geoPointValues.docValueCount() * fromPoints.length);
int v = 0; int v = 0;
for (GeoPoint from : fromPoints) { for (int i = 0; i < geoPointValues.docValueCount(); ++i) {
for (int i = 0; i < geoPointValues.count(); ++i) { final GeoPoint point = geoPointValues.nextValue();
final GeoPoint point = geoPointValues.valueAt(i); for (GeoPoint from : fromPoints) {
values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit); values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit);
v++; v++;
}
} }
sort();
return true;
} else {
return false;
} }
sort();
} }
}; };
} }

View file

@ -51,14 +51,14 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.search.grouping.CollapseTopFieldDocs; import org.apache.lucene.search.grouping.CollapseTopFieldDocs;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexInput;
@ -89,9 +89,9 @@ import java.util.Map;
import java.util.Objects; import java.util.Objects;
public class Lucene { public class Lucene {
public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54"; public static final String LATEST_DOC_VALUES_FORMAT = "Lucene70";
public static final String LATEST_POSTINGS_FORMAT = "Lucene50"; public static final String LATEST_POSTINGS_FORMAT = "Lucene50";
public static final String LATEST_CODEC = "Lucene62"; public static final String LATEST_CODEC = "Lucene70";
static { static {
Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class); Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class);

View file

@ -19,8 +19,8 @@
package org.elasticsearch.common.lucene; package org.elasticsearch.common.lucene;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReader.CoreClosedListener;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils; import org.elasticsearch.index.shard.ShardUtils;
@ -46,8 +46,8 @@ import java.util.concurrent.ConcurrentHashMap;
*/ */
public final class ShardCoreKeyMap { public final class ShardCoreKeyMap {
private final Map<Object, ShardId> coreKeyToShard; private final Map<IndexReader.CacheKey, ShardId> coreKeyToShard;
private final Map<String, Set<Object>> indexToCoreKey; private final Map<String, Set<IndexReader.CacheKey>> indexToCoreKey;
public ShardCoreKeyMap() { public ShardCoreKeyMap() {
coreKeyToShard = new ConcurrentHashMap<>(); coreKeyToShard = new ConcurrentHashMap<>();
@ -63,7 +63,11 @@ public final class ShardCoreKeyMap {
if (shardId == null) { if (shardId == null) {
throw new IllegalArgumentException("Could not extract shard id from " + reader); throw new IllegalArgumentException("Could not extract shard id from " + reader);
} }
final Object coreKey = reader.getCoreCacheKey(); final IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper();
if (cacheHelper == null) {
throw new IllegalArgumentException("Reader " + reader + " does not support caching");
}
final IndexReader.CacheKey coreKey = cacheHelper.getKey();
if (coreKeyToShard.containsKey(coreKey)) { if (coreKeyToShard.containsKey(coreKey)) {
// Do this check before entering the synchronized block in order to // Do this check before entering the synchronized block in order to
@ -75,18 +79,18 @@ public final class ShardCoreKeyMap {
final String index = shardId.getIndexName(); final String index = shardId.getIndexName();
synchronized (this) { synchronized (this) {
if (coreKeyToShard.containsKey(coreKey) == false) { if (coreKeyToShard.containsKey(coreKey) == false) {
Set<Object> objects = indexToCoreKey.get(index); Set<IndexReader.CacheKey> objects = indexToCoreKey.get(index);
if (objects == null) { if (objects == null) {
objects = new HashSet<>(); objects = new HashSet<>();
indexToCoreKey.put(index, objects); indexToCoreKey.put(index, objects);
} }
final boolean added = objects.add(coreKey); final boolean added = objects.add(coreKey);
assert added; assert added;
CoreClosedListener listener = ownerCoreCacheKey -> { IndexReader.ClosedListener listener = ownerCoreCacheKey -> {
assert coreKey == ownerCoreCacheKey; assert coreKey == ownerCoreCacheKey;
synchronized (ShardCoreKeyMap.this) { synchronized (ShardCoreKeyMap.this) {
coreKeyToShard.remove(ownerCoreCacheKey); coreKeyToShard.remove(ownerCoreCacheKey);
final Set<Object> coreKeys = indexToCoreKey.get(index); final Set<IndexReader.CacheKey> coreKeys = indexToCoreKey.get(index);
final boolean removed = coreKeys.remove(coreKey); final boolean removed = coreKeys.remove(coreKey);
assert removed; assert removed;
if (coreKeys.isEmpty()) { if (coreKeys.isEmpty()) {
@ -96,7 +100,7 @@ public final class ShardCoreKeyMap {
}; };
boolean addedListener = false; boolean addedListener = false;
try { try {
reader.addCoreClosedListener(listener); cacheHelper.addClosedListener(listener);
addedListener = true; addedListener = true;
// Only add the core key to the map as a last operation so that // Only add the core key to the map as a last operation so that
@ -131,7 +135,7 @@ public final class ShardCoreKeyMap {
* Get the set of core cache keys associated with the given index. * Get the set of core cache keys associated with the given index.
*/ */
public synchronized Set<Object> getCoreKeysForIndex(String index) { public synchronized Set<Object> getCoreKeysForIndex(String index) {
final Set<Object> objects = indexToCoreKey.get(index); final Set<IndexReader.CacheKey> objects = indexToCoreKey.get(index);
if (objects == null) { if (objects == null) {
return Collections.emptySet(); return Collections.emptySet();
} }
@ -154,9 +158,9 @@ public final class ShardCoreKeyMap {
if (assertionsEnabled == false) { if (assertionsEnabled == false) {
throw new AssertionError("only run this if assertions are enabled"); throw new AssertionError("only run this if assertions are enabled");
} }
Collection<Set<Object>> values = indexToCoreKey.values(); Collection<Set<IndexReader.CacheKey>> values = indexToCoreKey.values();
int size = 0; int size = 0;
for (Set<Object> value : values) { for (Set<IndexReader.CacheKey> value : values) {
size += value.size(); size += value.size();
} }
return size == coreKeyToShard.size(); return size == coreKeyToShard.size();

View file

@ -105,27 +105,17 @@ public final class AllTermQuery extends Query {
} }
@Override @Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
if (needsScores == false) { if (needsScores == false) {
return new TermQuery(term).createWeight(searcher, needsScores); return new TermQuery(term).createWeight(searcher, needsScores, boost);
} }
final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term); final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term);
final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field()); final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field());
final TermStatistics termStats = searcher.termStatistics(term, termStates); final TermStatistics termStats = searcher.termStatistics(term, termStates);
final Similarity similarity = searcher.getSimilarity(needsScores); final Similarity similarity = searcher.getSimilarity(needsScores);
final SimWeight stats = similarity.computeWeight(collectionStats, termStats); final SimWeight stats = similarity.computeWeight(boost, collectionStats, termStats);
return new Weight(this) { return new Weight(this) {
@Override
public float getValueForNormalization() throws IOException {
return stats.getValueForNormalization();
}
@Override
public void normalize(float norm, float topLevelBoost) {
stats.normalize(norm, topLevelBoost);
}
@Override @Override
public void extractTerms(Set<Term> terms) { public void extractTerms(Set<Term> terms) {
terms.add(term); terms.add(term);

View file

@ -49,6 +49,12 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
return this.shardId; return this.shardId;
} }
@Override
public CacheHelper getReaderCacheHelper() {
// safe to delegate since this reader does not alter the index
return in.getReaderCacheHelper();
}
@Override @Override
protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
return new ElasticsearchDirectoryReader(in, wrapper, shardId); return new ElasticsearchDirectoryReader(in, wrapper, shardId);
@ -84,14 +90,17 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader {
* @throws IllegalArgumentException if the reader doesn't contain an {@link ElasticsearchDirectoryReader} in it's hierarchy * @throws IllegalArgumentException if the reader doesn't contain an {@link ElasticsearchDirectoryReader} in it's hierarchy
*/ */
@SuppressForbidden(reason = "This is the only sane way to add a ReaderClosedListener") @SuppressForbidden(reason = "This is the only sane way to add a ReaderClosedListener")
public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ReaderClosedListener listener) { public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ClosedListener listener) {
ElasticsearchDirectoryReader elasticsearchDirectoryReader = getElasticsearchDirectoryReader(reader); ElasticsearchDirectoryReader elasticsearchDirectoryReader = getElasticsearchDirectoryReader(reader);
if (elasticsearchDirectoryReader != null) { if (elasticsearchDirectoryReader == null) {
assert reader.getCoreCacheKey() == elasticsearchDirectoryReader.getCoreCacheKey(); throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader");
elasticsearchDirectoryReader.addReaderClosedListener(listener);
return;
} }
throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader"); IndexReader.CacheHelper cacheHelper = elasticsearchDirectoryReader.getReaderCacheHelper();
if (cacheHelper == null) {
throw new IllegalArgumentException("Reader " + elasticsearchDirectoryReader + " does not support caching");
}
assert cacheHelper.getKey() == reader.getReaderCacheHelper().getKey();
cacheHelper.addClosedListener(listener);
} }
/** /**

View file

@ -49,8 +49,13 @@ public final class ElasticsearchLeafReader extends FilterLeafReader {
} }
@Override @Override
public Object getCoreCacheKey() { public CacheHelper getCoreCacheHelper() {
return in.getCoreCacheKey(); return in.getCoreCacheHelper();
}
@Override
public CacheHelper getReaderCacheHelper() {
return in.getReaderCacheHelper();
} }
public static ElasticsearchLeafReader getElasticsearchLeafReader(LeafReader reader) { public static ElasticsearchLeafReader getElasticsearchLeafReader(LeafReader reader) {

View file

@ -121,7 +121,6 @@ public class Queries {
if (isNegativeQuery(q)) { if (isNegativeQuery(q)) {
BooleanQuery bq = (BooleanQuery) q; BooleanQuery bq = (BooleanQuery) q;
BooleanQuery.Builder builder = new BooleanQuery.Builder(); BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(bq.isCoordDisabled());
for (BooleanClause clause : bq) { for (BooleanClause clause : bq) {
builder.add(clause); builder.add(clause);
} }
@ -154,7 +153,6 @@ public class Queries {
int msm = calculateMinShouldMatch(optionalClauses, minimumShouldMatch); int msm = calculateMinShouldMatch(optionalClauses, minimumShouldMatch);
if (0 < msm) { if (0 < msm) {
BooleanQuery.Builder builder = new BooleanQuery.Builder(); BooleanQuery.Builder builder = new BooleanQuery.Builder();
builder.setDisableCoord(query.isCoordDisabled());
for (BooleanClause clause : query) { for (BooleanClause clause : query) {
builder.add(clause); builder.add(clause);
} }
@ -170,10 +168,7 @@ public class Queries {
* otherwise return the original query. * otherwise return the original query.
*/ */
public static Query maybeApplyMinimumShouldMatch(Query query, @Nullable String minimumShouldMatch) { public static Query maybeApplyMinimumShouldMatch(Query query, @Nullable String minimumShouldMatch) {
// If the coordination factor is disabled on a boolean query we don't apply the minimum should match. if (query instanceof BooleanQuery) {
// This is done to make sure that the minimum_should_match doesn't get applied when there is only one word
// and multiple variations of the same word in the query (synonyms for instance).
if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) {
return applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); return applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
} else if (query instanceof ExtendedCommonTermsQuery) { } else if (query instanceof ExtendedCommonTermsQuery) {
((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);

View file

@ -62,7 +62,7 @@ public class FieldValueFactorFunction extends ScoreFunction {
public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) { public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) {
final SortedNumericDoubleValues values; final SortedNumericDoubleValues values;
if(indexFieldData == null) { if(indexFieldData == null) {
values = FieldData.emptySortedNumericDoubles(ctx.reader().maxDoc()); values = FieldData.emptySortedNumericDoubles();
} else { } else {
values = this.indexFieldData.load(ctx).getDoubleValues(); values = this.indexFieldData.load(ctx).getDoubleValues();
} }
@ -70,16 +70,16 @@ public class FieldValueFactorFunction extends ScoreFunction {
return new LeafScoreFunction() { return new LeafScoreFunction() {
@Override @Override
public double score(int docId, float subQueryScore) { public double score(int docId, float subQueryScore) throws IOException {
values.setDocument(docId);
final int numValues = values.count();
double value; double value;
if (numValues > 0) { if (values.advanceExact(docId)) {
value = values.valueAt(0); value = values.nextValue();
} else if (missing != null) {
value = missing;
} else { } else {
throw new ElasticsearchException("Missing value for field [" + field + "]"); if (missing != null) {
value = missing;
} else {
throw new ElasticsearchException("Missing value for field [" + field + "]");
}
} }
double val = value * boostFactor; double val = value * boostFactor;
double result = modifier.apply(val); double result = modifier.apply(val);
@ -91,7 +91,7 @@ public class FieldValueFactorFunction extends ScoreFunction {
} }
@Override @Override
public Explanation explainScore(int docId, Explanation subQueryScore) { public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException {
String modifierStr = modifier != null ? modifier.toString() : ""; String modifierStr = modifier != null ? modifier.toString() : "";
String defaultStr = missing != null ? "?:" + missing : ""; String defaultStr = missing != null ? "?:" + missing : "";
double score = score(docId, subQueryScore.getValue()); double score = score(docId, subQueryScore.getValue());

View file

@ -135,9 +135,9 @@ public class FiltersFunctionScoreQuery extends Query {
} }
@Override @Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
if (needsScores == false && minScore == null) { if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores); return subQuery.createWeight(searcher, needsScores, boost);
} }
boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE; boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE;
@ -146,7 +146,7 @@ public class FiltersFunctionScoreQuery extends Query {
subQueryNeedsScores |= filterFunctions[i].function.needsScores(); subQueryNeedsScores |= filterFunctions[i].function.needsScores();
filterWeights[i] = searcher.createNormalizedWeight(filterFunctions[i].filter, false); filterWeights[i] = searcher.createNormalizedWeight(filterFunctions[i].filter, false);
} }
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores); Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost);
return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryNeedsScores); return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryNeedsScores);
} }
@ -168,16 +168,6 @@ public class FiltersFunctionScoreQuery extends Query {
subQueryWeight.extractTerms(terms); subQueryWeight.extractTerms(terms);
} }
@Override
public float getValueForNormalization() throws IOException {
return subQueryWeight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
subQueryWeight.normalize(norm, boost);
}
private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context); Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) { if (subQueryScorer == null) {
@ -281,7 +271,7 @@ public class FiltersFunctionScoreQuery extends Query {
return scoreCombiner.combine(subQueryScore, factor, maxBoost); return scoreCombiner.combine(subQueryScore, factor, maxBoost);
} }
protected double computeScore(int docId, float subQueryScore) { protected double computeScore(int docId, float subQueryScore) throws IOException {
double factor = 1d; double factor = 1d;
switch(scoreMode) { switch(scoreMode) {
case FIRST: case FIRST:

View file

@ -91,16 +91,16 @@ public class FunctionScoreQuery extends Query {
} }
@Override @Override
public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException {
if (needsScores == false && minScore == null) { if (needsScores == false && minScore == null) {
return subQuery.createWeight(searcher, needsScores); return subQuery.createWeight(searcher, needsScores, boost);
} }
boolean subQueryNeedsScores = boolean subQueryNeedsScores =
combineFunction != CombineFunction.REPLACE // if we don't replace we need the original score combineFunction != CombineFunction.REPLACE // if we don't replace we need the original score
|| function == null // when the function is null, we just multiply the score, so we need it || function == null // when the function is null, we just multiply the score, so we need it
|| function.needsScores(); // some scripts can replace with a script that returns eg. 1/_score || function.needsScores(); // some scripts can replace with a script that returns eg. 1/_score
Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores); Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost);
return new CustomBoostFactorWeight(this, subQueryWeight, subQueryNeedsScores); return new CustomBoostFactorWeight(this, subQueryWeight, subQueryNeedsScores);
} }
@ -120,16 +120,6 @@ public class FunctionScoreQuery extends Query {
subQueryWeight.extractTerms(terms); subQueryWeight.extractTerms(terms);
} }
@Override
public float getValueForNormalization() throws IOException {
return subQueryWeight.getValueForNormalization();
}
@Override
public void normalize(float norm, float boost) {
subQueryWeight.normalize(norm, boost);
}
private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException {
Scorer subQueryScorer = subQueryWeight.scorer(context); Scorer subQueryScorer = subQueryWeight.scorer(context);
if (subQueryScorer == null) { if (subQueryScorer == null) {

View file

@ -26,7 +26,7 @@ import java.io.IOException;
/** Per-leaf {@link ScoreFunction}. */ /** Per-leaf {@link ScoreFunction}. */
public abstract class LeafScoreFunction { public abstract class LeafScoreFunction {
public abstract double score(int docId, float subQueryScore); public abstract double score(int docId, float subQueryScore) throws IOException;
public abstract Explanation explainScore(int docId, Explanation subQueryScore) throws IOException; public abstract Explanation explainScore(int docId, Explanation subQueryScore) throws IOException;

View file

@ -25,6 +25,7 @@ import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import java.io.IOException;
import java.util.Objects; import java.util.Objects;
/** /**
@ -68,14 +69,16 @@ public class RandomScoreFunction extends ScoreFunction {
return new LeafScoreFunction() { return new LeafScoreFunction() {
@Override @Override
public double score(int docId, float subQueryScore) { public double score(int docId, float subQueryScore) throws IOException {
uidByteData.setDocument(docId); if (uidByteData.advanceExact(docId) == false) {
int hash = StringHelper.murmurhash3_x86_32(uidByteData.valueAt(0), saltedSeed); throw new AssertionError("Document without a _uid");
}
int hash = StringHelper.murmurhash3_x86_32(uidByteData.nextValue(), saltedSeed);
return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0 return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0
} }
@Override @Override
public Explanation explainScore(int docId, Explanation subQueryScore) { public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException {
return Explanation.match( return Explanation.match(
CombineFunction.toFloat(score(docId, subQueryScore.getValue())), CombineFunction.toFloat(score(docId, subQueryScore.getValue())),
"random score function (seed: " + originalSeed + ")"); "random score function (seed: " + originalSeed + ")");

View file

@ -52,7 +52,7 @@ public class WeightFactorFunction extends ScoreFunction {
final LeafScoreFunction leafFunction = scoreFunction.getLeafScoreFunction(ctx); final LeafScoreFunction leafFunction = scoreFunction.getLeafScoreFunction(ctx);
return new LeafScoreFunction() { return new LeafScoreFunction() {
@Override @Override
public double score(int docId, float subQueryScore) { public double score(int docId, float subQueryScore) throws IOException {
return leafFunction.score(docId, subQueryScore) * getWeight(); return leafFunction.score(docId, subQueryScore) * getWeight();
} }

View file

@ -52,12 +52,7 @@ final class PerThreadIDVersionAndSeqNoLookup {
/** terms enum for uid field */ /** terms enum for uid field */
private final TermsEnum termsEnum; private final TermsEnum termsEnum;
/** _version data */
private final NumericDocValues versions;
/** _seq_no data */
private final NumericDocValues seqNos;
/** _primary_term data */
private final NumericDocValues primaryTerms;
/** Reused for iteration (when the term exists) */ /** Reused for iteration (when the term exists) */
private PostingsEnum docsEnum; private PostingsEnum docsEnum;
@ -72,30 +67,33 @@ final class PerThreadIDVersionAndSeqNoLookup {
Terms terms = fields.terms(UidFieldMapper.NAME); Terms terms = fields.terms(UidFieldMapper.NAME);
termsEnum = terms.iterator(); termsEnum = terms.iterator();
if (termsEnum == null) { if (termsEnum == null) {
throw new IllegalArgumentException("reader misses the [" + UidFieldMapper.NAME + throw new IllegalArgumentException("reader misses the [" + UidFieldMapper.NAME + "] field");
"] field");
} }
versions = reader.getNumericDocValues(VersionFieldMapper.NAME); if (reader.getNumericDocValues(VersionFieldMapper.NAME) == null) {
if (versions == null) { throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field");
throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME +
"] field");
} }
seqNos = reader.getNumericDocValues(SeqNoFieldMapper.NAME);
primaryTerms = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
Object readerKey = null; Object readerKey = null;
assert (readerKey = reader.getCoreCacheKey()) != null; assert (readerKey = reader.getCoreCacheHelper().getKey()) != null;
this.readerKey = readerKey; this.readerKey = readerKey;
} }
/** Return null if id is not found. */ /** Return null if id is not found. */
public DocIdAndVersion lookupVersion(BytesRef id, Bits liveDocs, LeafReaderContext context) public DocIdAndVersion lookupVersion(BytesRef id, Bits liveDocs, LeafReaderContext context)
throws IOException { throws IOException {
assert context.reader().getCoreCacheKey().equals(readerKey) : assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) :
"context's reader is not the same as the reader class was initialized on."; "context's reader is not the same as the reader class was initialized on.";
int docID = getDocID(id, liveDocs); int docID = getDocID(id, liveDocs);
if (docID != DocIdSetIterator.NO_MORE_DOCS) { if (docID != DocIdSetIterator.NO_MORE_DOCS) {
return new DocIdAndVersion(docID, versions.get(docID), context); final NumericDocValues versions = context.reader().getNumericDocValues(VersionFieldMapper.NAME);
if (versions == null) {
throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field");
}
if (versions.advanceExact(docID) == false) {
throw new IllegalArgumentException("Document [" + docID + "] misses the [" + VersionFieldMapper.NAME + "] field");
}
return new DocIdAndVersion(docID, versions.longValue(), context);
} else { } else {
return null; return null;
} }
@ -124,11 +122,18 @@ final class PerThreadIDVersionAndSeqNoLookup {
/** Return null if id is not found. */ /** Return null if id is not found. */
DocIdAndSeqNo lookupSeqNo(BytesRef id, Bits liveDocs, LeafReaderContext context) throws IOException { DocIdAndSeqNo lookupSeqNo(BytesRef id, Bits liveDocs, LeafReaderContext context) throws IOException {
assert context.reader().getCoreCacheKey().equals(readerKey) : assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) :
"context's reader is not the same as the reader class was initialized on."; "context's reader is not the same as the reader class was initialized on.";
int docID = getDocID(id, liveDocs); int docID = getDocID(id, liveDocs);
if (docID != DocIdSetIterator.NO_MORE_DOCS) { if (docID != DocIdSetIterator.NO_MORE_DOCS) {
return new DocIdAndSeqNo(docID, seqNos == null ? SequenceNumbersService.UNASSIGNED_SEQ_NO : seqNos.get(docID), context); NumericDocValues seqNos = context.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
long seqNo;
if (seqNos != null && seqNos.advanceExact(docID)) {
seqNo = seqNos.longValue();
} else {
seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO;
}
return new DocIdAndSeqNo(docID, seqNo, context);
} else { } else {
return null; return null;
} }
@ -139,7 +144,12 @@ final class PerThreadIDVersionAndSeqNoLookup {
* *
* Note that 0 is an illegal primary term. See {@link org.elasticsearch.cluster.metadata.IndexMetaData#primaryTerm(int)} * Note that 0 is an illegal primary term. See {@link org.elasticsearch.cluster.metadata.IndexMetaData#primaryTerm(int)}
**/ **/
long lookUpPrimaryTerm(int docID) throws IOException { long lookUpPrimaryTerm(int docID, LeafReader reader) throws IOException {
return primaryTerms == null ? 0 : primaryTerms.get(docID); NumericDocValues primaryTerms = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
if (primaryTerms != null && primaryTerms.advanceExact(docID)) {
return primaryTerms.longValue();
} else {
return 0;
}
} }
} }

View file

@ -21,7 +21,6 @@ package org.elasticsearch.common.lucene.uid;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReader.CoreClosedListener;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.util.CloseableThreadLocal; import org.apache.lucene.util.CloseableThreadLocal;
@ -41,7 +40,7 @@ public final class VersionsAndSeqNoResolver {
ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
// Evict this reader from lookupStates once it's closed: // Evict this reader from lookupStates once it's closed:
private static final CoreClosedListener removeLookupState = key -> { private static final IndexReader.ClosedListener removeLookupState = key -> {
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.remove(key); CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.remove(key);
if (ctl != null) { if (ctl != null) {
ctl.close(); ctl.close();
@ -49,15 +48,15 @@ public final class VersionsAndSeqNoResolver {
}; };
private static PerThreadIDVersionAndSeqNoLookup getLookupState(LeafReader reader) throws IOException { private static PerThreadIDVersionAndSeqNoLookup getLookupState(LeafReader reader) throws IOException {
Object key = reader.getCoreCacheKey(); IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper();
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.get(key); CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> ctl = lookupStates.get(cacheHelper.getKey());
if (ctl == null) { if (ctl == null) {
// First time we are seeing this reader's core; make a new CTL: // First time we are seeing this reader's core; make a new CTL:
ctl = new CloseableThreadLocal<>(); ctl = new CloseableThreadLocal<>();
CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> other = lookupStates.putIfAbsent(key, ctl); CloseableThreadLocal<PerThreadIDVersionAndSeqNoLookup> other = lookupStates.putIfAbsent(cacheHelper.getKey(), ctl);
if (other == null) { if (other == null) {
// Our CTL won, we must remove it when the core is closed: // Our CTL won, we must remove it when the core is closed:
reader.addCoreClosedListener(removeLookupState); cacheHelper.addClosedListener(removeLookupState);
} else { } else {
// Another thread beat us to it: just use their CTL: // Another thread beat us to it: just use their CTL:
ctl = other; ctl = other;
@ -161,7 +160,7 @@ public final class VersionsAndSeqNoResolver {
public static long loadPrimaryTerm(DocIdAndSeqNo docIdAndSeqNo) throws IOException { public static long loadPrimaryTerm(DocIdAndSeqNo docIdAndSeqNo) throws IOException {
LeafReader leaf = docIdAndSeqNo.context.reader(); LeafReader leaf = docIdAndSeqNo.context.reader();
PerThreadIDVersionAndSeqNoLookup lookup = getLookupState(leaf); PerThreadIDVersionAndSeqNoLookup lookup = getLookupState(leaf);
long result = lookup.lookUpPrimaryTerm(docIdAndSeqNo.docId); long result = lookup.lookUpPrimaryTerm(docIdAndSeqNo.docId, leaf);
assert result > 0 : "should always resolve a primary term for a resolved sequence number. primary_term [" + result + "]" assert result > 0 : "should always resolve a primary term for a resolved sequence number. primary_term [" + result + "]"
+ " docId [" + docIdAndSeqNo.docId + "] seqNo [" + docIdAndSeqNo.seqNo + "]"; + " docId [" + docIdAndSeqNo.docId + "] seqNo [" + docIdAndSeqNo.seqNo + "]";
return result; return result;

View file

@ -21,8 +21,8 @@ package org.elasticsearch.index.cache.bitset;
import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier; import org.apache.logging.log4j.util.Supplier;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
@ -71,13 +71,13 @@ import java.util.concurrent.Executor;
* and require that it should always be around should use this cache, otherwise the * and require that it should always be around should use this cache, otherwise the
* {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead. * {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead.
*/ */
public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable { public final class BitsetFilterCache extends AbstractIndexComponent implements IndexReader.ClosedListener, RemovalListener<IndexReader.CacheKey, Cache<Query, BitsetFilterCache.Value>>, Closeable {
public static final Setting<Boolean> INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = public static final Setting<Boolean> INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING =
Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, Property.IndexScope); Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, Property.IndexScope);
private final boolean loadRandomAccessFiltersEagerly; private final boolean loadRandomAccessFiltersEagerly;
private final Cache<Object, Cache<Query, Value>> loadedFilters; private final Cache<IndexReader.CacheKey, Cache<Query, Value>> loadedFilters;
private final Listener listener; private final Listener listener;
public BitsetFilterCache(IndexSettings indexSettings, Listener listener) { public BitsetFilterCache(IndexSettings indexSettings, Listener listener) {
@ -86,7 +86,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
throw new IllegalArgumentException("listener must not be null"); throw new IllegalArgumentException("listener must not be null");
} }
this.loadRandomAccessFiltersEagerly = this.indexSettings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); this.loadRandomAccessFiltersEagerly = this.indexSettings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING);
this.loadedFilters = CacheBuilder.<Object, Cache<Query, Value>>builder().removalListener(this).build(); this.loadedFilters = CacheBuilder.<IndexReader.CacheKey, Cache<Query, Value>>builder().removalListener(this).build();
this.listener = listener; this.listener = listener;
} }
@ -100,7 +100,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
} }
@Override @Override
public void onClose(Object ownerCoreCacheKey) { public void onClose(IndexReader.CacheKey ownerCoreCacheKey) {
loadedFilters.invalidate(ownerCoreCacheKey); loadedFilters.invalidate(ownerCoreCacheKey);
} }
@ -115,7 +115,11 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
} }
private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException { private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException {
final Object coreCacheReader = context.reader().getCoreCacheKey(); final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper();
if (cacheHelper == null) {
throw new IllegalArgumentException("Reader " + context.reader() + " does not support caching");
}
final IndexReader.CacheKey coreCacheReader = cacheHelper.getKey();
final ShardId shardId = ShardUtils.extractShardId(context.reader()); final ShardId shardId = ShardUtils.extractShardId(context.reader());
if (shardId != null // can't require it because of the percolator if (shardId != null // can't require it because of the percolator
&& indexSettings.getIndex().equals(shardId.getIndex()) == false) { && indexSettings.getIndex().equals(shardId.getIndex()) == false) {
@ -124,7 +128,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
+ " with cache of index " + indexSettings.getIndex()); + " with cache of index " + indexSettings.getIndex());
} }
Cache<Query, Value> filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> { Cache<Query, Value> filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> {
context.reader().addCoreClosedListener(BitsetFilterCache.this); cacheHelper.addClosedListener(BitsetFilterCache.this);
return CacheBuilder.<Query, Value>builder().build(); return CacheBuilder.<Query, Value>builder().build();
}); });
@ -148,7 +152,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
} }
@Override @Override
public void onRemoval(RemovalNotification<Object, Cache<Query, Value>> notification) { public void onRemoval(RemovalNotification<IndexReader.CacheKey, Cache<Query, Value>> notification) {
if (notification.getKey() == null) { if (notification.getKey() == null) {
return; return;
} }
@ -272,7 +276,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L
} }
Cache<Object, Cache<Query, Value>> getLoadedFilters() { Cache<IndexReader.CacheKey, Cache<Query, Value>> getLoadedFilters() {
return loadedFilters; return loadedFilters;
} }

View file

@ -22,7 +22,7 @@ package org.elasticsearch.index.codec;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene62.Lucene62Codec; import org.apache.lucene.codecs.lucene70.Lucene70Codec;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
@ -47,8 +47,8 @@ public class CodecService {
public CodecService(@Nullable MapperService mapperService, Logger logger) { public CodecService(@Nullable MapperService mapperService, Logger logger) {
final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder(); final MapBuilder<String, Codec> codecs = MapBuilder.<String, Codec>newMapBuilder();
if (mapperService == null) { if (mapperService == null) {
codecs.put(DEFAULT_CODEC, new Lucene62Codec()); codecs.put(DEFAULT_CODEC, new Lucene70Codec());
codecs.put(BEST_COMPRESSION_CODEC, new Lucene62Codec(Mode.BEST_COMPRESSION)); codecs.put(BEST_COMPRESSION_CODEC, new Lucene70Codec(Mode.BEST_COMPRESSION));
} else { } else {
codecs.put(DEFAULT_CODEC, codecs.put(DEFAULT_CODEC,
new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger));

View file

@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
import org.apache.lucene.codecs.lucene62.Lucene62Codec; import org.apache.lucene.codecs.lucene70.Lucene70Codec;
import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.MapperService;
* configured for a specific field the default postings format is used. * configured for a specific field the default postings format is used.
*/ */
// LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version // LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version
public class PerFieldMappingPostingFormatCodec extends Lucene62Codec { public class PerFieldMappingPostingFormatCodec extends Lucene70Codec {
private final Logger logger; private final Logger logger;
private final MapperService mapperService; private final MapperService mapperService;

View file

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link IOException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #binaryValue()}.
*/
public abstract class AbstractBinaryDocValues extends BinaryDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}

View file

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link IOException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #longValue()}.
*/
public abstract class AbstractNumericDocValues extends NumericDocValues {
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}

View file

@ -19,30 +19,32 @@
package org.elasticsearch.index.fielddata; package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/** /**
* Base implementation of a {@link RandomAccessOrds} instance. * Base implementation that throws an {@link IOException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #ordValue()}.
*/ */
public abstract class AbstractRandomAccessOrds extends RandomAccessOrds { public abstract class AbstractSortedDocValues extends SortedDocValues {
int i = 0;
protected abstract void doSetDocument(int docID);
@Override @Override
public final void setDocument(int docID) { public int nextDoc() throws IOException {
doSetDocument(docID); throw new UnsupportedOperationException();
i = 0;
} }
@Override @Override
public long nextOrd() { public int advance(int target) throws IOException {
if (i < cardinality()) { throw new UnsupportedOperationException();
return ordAt(i++); }
} else {
return NO_MORE_ORDS; @Override
} public long cost() {
throw new UnsupportedOperationException();
} }
} }

View file

@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link IOException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #docValueCount()} and {@link #nextValue()}.
*/
public abstract class AbstractSortedNumericDocValues extends SortedNumericDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}

View file

@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link IOException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #getValueCount()} and {@link #nextOrd()} and {@link #lookupOrd(long)}.
*/
public abstract class AbstractSortedSetDocValues extends SortedSetDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}

View file

@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.search.DocIdSetIterator;
import java.io.IOException;
/**
* Base implementation that throws an {@link IOException} for the
* {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and
* aggregations, which only use {@link #advanceExact(int)} and
* {@link #docValueCount()} and {@link #nextValue()}.
*/
public abstract class AbstractSortingNumericDocValues extends SortingNumericDocValues {
@Override
public int docID() {
throw new UnsupportedOperationException();
}
@Override
public int nextDoc() throws IOException {
throw new UnsupportedOperationException();
}
@Override
public int advance(int target) throws IOException {
throw new UnsupportedOperationException();
}
@Override
public long cost() {
throw new UnsupportedOperationException();
}
}

View file

@ -19,7 +19,7 @@
package org.elasticsearch.index.fielddata; package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
/** /**
* Specialization of {@link AtomicFieldData} for data that is indexed with * Specialization of {@link AtomicFieldData} for data that is indexed with
@ -30,6 +30,6 @@ public interface AtomicOrdinalsFieldData extends AtomicFieldData {
/** /**
* Return the ordinals values for the current atomic reader. * Return the ordinals values for the current atomic reader.
*/ */
RandomAccessOrds getOrdinalsValues(); SortedSetDocValues getOrdinalsValues();
} }

View file

@ -22,15 +22,13 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits; import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
@ -43,8 +41,8 @@ public enum FieldData {
/** /**
* Return a {@link SortedBinaryDocValues} that doesn't contain any value. * Return a {@link SortedBinaryDocValues} that doesn't contain any value.
*/ */
public static SortedBinaryDocValues emptySortedBinary(int maxDoc) { public static SortedBinaryDocValues emptySortedBinary() {
return singleton(DocValues.emptyBinary(), new Bits.MatchNoBits(maxDoc)); return singleton(DocValues.emptyBinary());
} }
/** /**
@ -53,8 +51,13 @@ public enum FieldData {
public static NumericDoubleValues emptyNumericDouble() { public static NumericDoubleValues emptyNumericDouble() {
return new NumericDoubleValues() { return new NumericDoubleValues() {
@Override @Override
public double get(int docID) { public boolean advanceExact(int doc) throws IOException {
return 0; return false;
}
@Override
public double doubleValue() throws IOException {
throw new UnsupportedOperationException();
} }
}; };
@ -63,16 +66,20 @@ public enum FieldData {
/** /**
* Return a {@link SortedNumericDoubleValues} that doesn't contain any value. * Return a {@link SortedNumericDoubleValues} that doesn't contain any value.
*/ */
public static SortedNumericDoubleValues emptySortedNumericDoubles(int maxDoc) { public static SortedNumericDoubleValues emptySortedNumericDoubles() {
return singleton(emptyNumericDouble(), new Bits.MatchNoBits(maxDoc)); return singleton(emptyNumericDouble());
} }
public static GeoPointValues emptyGeoPoint() { public static GeoPointValues emptyGeoPoint() {
final GeoPoint point = new GeoPoint();
return new GeoPointValues() { return new GeoPointValues() {
@Override @Override
public GeoPoint get(int docID) { public boolean advanceExact(int doc) throws IOException {
return point; return false;
}
@Override
public GeoPoint geoPointValue() {
throw new UnsupportedOperationException();
} }
}; };
} }
@ -80,68 +87,123 @@ public enum FieldData {
/** /**
* Return a {@link SortedNumericDoubleValues} that doesn't contain any value. * Return a {@link SortedNumericDoubleValues} that doesn't contain any value.
*/ */
public static MultiGeoPointValues emptyMultiGeoPoints(int maxDoc) { public static MultiGeoPointValues emptyMultiGeoPoints() {
return singleton(emptyGeoPoint(), new Bits.MatchNoBits(maxDoc)); return singleton(emptyGeoPoint());
} }
/** /**
* Returns a {@link Bits} representing all documents from <code>dv</code> that have a value. * Returns a {@link Bits} representing all documents from <code>dv</code> that have a value.
*/ */
public static Bits docsWithValue(final SortedBinaryDocValues dv, final int maxDoc) { public static Bits docsWithValue(final SortedBinaryDocValues dv, final int maxDoc) {
return new Bits() { return new Bits() {
@Override @Override
public boolean get(int index) { public boolean get(int index) {
dv.setDocument(index); try {
return dv.count() != 0; return dv.advanceExact(index);
} } catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override @Override
public int length() { public int length() {
return maxDoc; return maxDoc;
} }
}; };
} }
/** /**
* Returns a Bits representing all documents from <code>dv</code> that have a value. * Returns a {@link Bits} representing all documents from <code>dv</code>
* that have a value.
*/
public static Bits docsWithValue(final SortedSetDocValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
/**
* Returns a Bits representing all documents from <code>dv</code> that have
* a value.
*/ */
public static Bits docsWithValue(final MultiGeoPointValues dv, final int maxDoc) { public static Bits docsWithValue(final MultiGeoPointValues dv, final int maxDoc) {
return new Bits() { return new Bits() {
@Override @Override
public boolean get(int index) { public boolean get(int index) {
dv.setDocument(index); try {
return dv.count() != 0; return dv.advanceExact(index);
} } catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override @Override
public int length() { public int length() {
return maxDoc; return maxDoc;
} }
}; };
} }
/** /**
* Returns a Bits representing all documents from <code>dv</code> that have a value. * Returns a Bits representing all documents from <code>dv</code> that have a value.
*/ */
public static Bits docsWithValue(final SortedNumericDoubleValues dv, final int maxDoc) { public static Bits docsWithValue(final SortedNumericDoubleValues dv, final int maxDoc) {
return new Bits() { return new Bits() {
@Override @Override
public boolean get(int index) { public boolean get(int index) {
dv.setDocument(index); try {
return dv.count() != 0; return dv.advanceExact(index);
} } catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override @Override
public int length() { public int length() {
return maxDoc; return maxDoc;
} }
}; };
} }
/** /**
* Given a {@link SortedNumericDoubleValues}, return a {@link SortedNumericDocValues} * Returns a Bits representing all documents from <code>dv</code> that have
* instance that will translate double values to sortable long bits using * a value.
* {@link NumericUtils#doubleToSortableLong(double)}. */
public static Bits docsWithValue(final SortedNumericDocValues dv, final int maxDoc) {
return new Bits() {
@Override
public boolean get(int index) {
try {
return dv.advanceExact(index);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
@Override
public int length() {
return maxDoc;
}
};
}
/**
* Given a {@link SortedNumericDoubleValues}, return a
* {@link SortedNumericDocValues} instance that will translate double values
* to sortable long bits using
* {@link org.apache.lucene.util.NumericUtils#doubleToSortableLong(double)}.
*/ */
public static SortedNumericDocValues toSortableLongBits(SortedNumericDoubleValues values) { public static SortedNumericDocValues toSortableLongBits(SortedNumericDoubleValues values) {
final NumericDoubleValues singleton = unwrapSingleton(values); final NumericDoubleValues singleton = unwrapSingleton(values);
@ -152,8 +214,7 @@ public enum FieldData {
} else { } else {
longBits = new SortableLongBitsNumericDocValues(singleton); longBits = new SortableLongBitsNumericDocValues(singleton);
} }
final Bits docsWithField = unwrapSingletonBits(values); return DocValues.singleton(longBits);
return DocValues.singleton(longBits, docsWithField);
} else { } else {
if (values instanceof SortableLongBitsToSortedNumericDoubleValues) { if (values instanceof SortableLongBitsToSortedNumericDoubleValues) {
return ((SortableLongBitsToSortedNumericDoubleValues) values).getLongValues(); return ((SortableLongBitsToSortedNumericDoubleValues) values).getLongValues();
@ -166,7 +227,7 @@ public enum FieldData {
/** /**
* Given a {@link SortedNumericDocValues}, return a {@link SortedNumericDoubleValues} * Given a {@link SortedNumericDocValues}, return a {@link SortedNumericDoubleValues}
* instance that will translate long values to doubles using * instance that will translate long values to doubles using
* {@link NumericUtils#sortableLongToDouble(long)}. * {@link org.apache.lucene.util.NumericUtils#sortableLongToDouble(long)}.
*/ */
public static SortedNumericDoubleValues sortableLongBitsToDoubles(SortedNumericDocValues values) { public static SortedNumericDoubleValues sortableLongBitsToDoubles(SortedNumericDocValues values) {
final NumericDocValues singleton = DocValues.unwrapSingleton(values); final NumericDocValues singleton = DocValues.unwrapSingleton(values);
@ -177,8 +238,7 @@ public enum FieldData {
} else { } else {
doubles = new SortableLongBitsToNumericDoubleValues(singleton); doubles = new SortableLongBitsToNumericDoubleValues(singleton);
} }
final Bits docsWithField = DocValues.unwrapSingletonBits(values); return singleton(doubles);
return singleton(doubles, docsWithField);
} else { } else {
if (values instanceof SortableLongBitsSortedNumericDocValues) { if (values instanceof SortableLongBitsSortedNumericDocValues) {
return ((SortableLongBitsSortedNumericDocValues) values).getDoubleValues(); return ((SortableLongBitsSortedNumericDocValues) values).getDoubleValues();
@ -194,8 +254,7 @@ public enum FieldData {
public static SortedNumericDoubleValues castToDouble(final SortedNumericDocValues values) { public static SortedNumericDoubleValues castToDouble(final SortedNumericDocValues values) {
final NumericDocValues singleton = DocValues.unwrapSingleton(values); final NumericDocValues singleton = DocValues.unwrapSingleton(values);
if (singleton != null) { if (singleton != null) {
final Bits docsWithField = DocValues.unwrapSingletonBits(values); return singleton(new DoubleCastedValues(singleton));
return singleton(new DoubleCastedValues(singleton), docsWithField);
} else { } else {
return new SortedDoubleCastedValues(values); return new SortedDoubleCastedValues(values);
} }
@ -207,8 +266,7 @@ public enum FieldData {
public static SortedNumericDocValues castToLong(final SortedNumericDoubleValues values) { public static SortedNumericDocValues castToLong(final SortedNumericDoubleValues values) {
final NumericDoubleValues singleton = unwrapSingleton(values); final NumericDoubleValues singleton = unwrapSingleton(values);
if (singleton != null) { if (singleton != null) {
final Bits docsWithField = unwrapSingletonBits(values); return DocValues.singleton(new LongCastedValues(singleton));
return DocValues.singleton(new LongCastedValues(singleton), docsWithField);
} else { } else {
return new SortedLongCastedValues(values); return new SortedLongCastedValues(values);
} }
@ -217,15 +275,14 @@ public enum FieldData {
/** /**
* Returns a multi-valued view over the provided {@link NumericDoubleValues}. * Returns a multi-valued view over the provided {@link NumericDoubleValues}.
*/ */
public static SortedNumericDoubleValues singleton(NumericDoubleValues values, Bits docsWithField) { public static SortedNumericDoubleValues singleton(NumericDoubleValues values) {
return new SingletonSortedNumericDoubleValues(values, docsWithField); return new SingletonSortedNumericDoubleValues(values);
} }
/** /**
* Returns a single-valued view of the {@link SortedNumericDoubleValues}, * Returns a single-valued view of the {@link SortedNumericDoubleValues},
* if it was previously wrapped with {@link DocValues#singleton(NumericDocValues, Bits)}, * if it was previously wrapped with {@link DocValues#singleton(NumericDocValues)},
* or null. * or null.
* @see DocValues#unwrapSingletonBits(SortedNumericDocValues)
*/ */
public static NumericDoubleValues unwrapSingleton(SortedNumericDoubleValues values) { public static NumericDoubleValues unwrapSingleton(SortedNumericDoubleValues values) {
if (values instanceof SingletonSortedNumericDoubleValues) { if (values instanceof SingletonSortedNumericDoubleValues) {
@ -234,31 +291,17 @@ public enum FieldData {
return null; return null;
} }
/**
* Returns the documents with a value for the {@link SortedNumericDoubleValues},
* if it was previously wrapped with {@link #singleton(NumericDoubleValues, Bits)},
* or null.
*/
public static Bits unwrapSingletonBits(SortedNumericDoubleValues dv) {
if (dv instanceof SingletonSortedNumericDoubleValues) {
return ((SingletonSortedNumericDoubleValues)dv).getDocsWithField();
} else {
return null;
}
}
/** /**
* Returns a multi-valued view over the provided {@link GeoPointValues}. * Returns a multi-valued view over the provided {@link GeoPointValues}.
*/ */
public static MultiGeoPointValues singleton(GeoPointValues values, Bits docsWithField) { public static MultiGeoPointValues singleton(GeoPointValues values) {
return new SingletonMultiGeoPointValues(values, docsWithField); return new SingletonMultiGeoPointValues(values);
} }
/** /**
* Returns a single-valued view of the {@link MultiGeoPointValues}, * Returns a single-valued view of the {@link MultiGeoPointValues},
* if it was previously wrapped with {@link #singleton(GeoPointValues, Bits)}, * if it was previously wrapped with {@link #singleton(GeoPointValues)},
* or null. * or null.
* @see #unwrapSingletonBits(MultiGeoPointValues)
*/ */
public static GeoPointValues unwrapSingleton(MultiGeoPointValues values) { public static GeoPointValues unwrapSingleton(MultiGeoPointValues values) {
if (values instanceof SingletonMultiGeoPointValues) { if (values instanceof SingletonMultiGeoPointValues) {
@ -267,30 +310,17 @@ public enum FieldData {
return null; return null;
} }
/**
* Returns the documents with a value for the {@link MultiGeoPointValues},
* if it was previously wrapped with {@link #singleton(GeoPointValues, Bits)},
* or null.
*/
public static Bits unwrapSingletonBits(MultiGeoPointValues values) {
if (values instanceof SingletonMultiGeoPointValues) {
return ((SingletonMultiGeoPointValues) values).getDocsWithField();
}
return null;
}
/** /**
* Returns a multi-valued view over the provided {@link BinaryDocValues}. * Returns a multi-valued view over the provided {@link BinaryDocValues}.
*/ */
public static SortedBinaryDocValues singleton(BinaryDocValues values, Bits docsWithField) { public static SortedBinaryDocValues singleton(BinaryDocValues values) {
return new SingletonSortedBinaryDocValues(values, docsWithField); return new SingletonSortedBinaryDocValues(values);
} }
/** /**
* Returns a single-valued view of the {@link SortedBinaryDocValues}, * Returns a single-valued view of the {@link SortedBinaryDocValues},
* if it was previously wrapped with {@link #singleton(BinaryDocValues, Bits)}, * if it was previously wrapped with {@link #singleton(BinaryDocValues)},
* or null. * or null.
* @see #unwrapSingletonBits(SortedBinaryDocValues)
*/ */
public static BinaryDocValues unwrapSingleton(SortedBinaryDocValues values) { public static BinaryDocValues unwrapSingleton(SortedBinaryDocValues values) {
if (values instanceof SingletonSortedBinaryDocValues) { if (values instanceof SingletonSortedBinaryDocValues) {
@ -299,18 +329,6 @@ public enum FieldData {
return null; return null;
} }
/**
* Returns the documents with a value for the {@link SortedBinaryDocValues},
* if it was previously wrapped with {@link #singleton(BinaryDocValues, Bits)},
* or null.
*/
public static Bits unwrapSingletonBits(SortedBinaryDocValues values) {
if (values instanceof SingletonSortedBinaryDocValues) {
return ((SingletonSortedBinaryDocValues) values).getDocsWithField();
}
return null;
}
/** /**
* Returns whether the provided values *might* be multi-valued. There is no * Returns whether the provided values *might* be multi-valued. There is no
* guarantee that this method will return <tt>false</tt> in the single-valued case. * guarantee that this method will return <tt>false</tt> in the single-valued case.
@ -359,10 +377,13 @@ public enum FieldData {
public static SortedBinaryDocValues toString(final SortedNumericDocValues values) { public static SortedBinaryDocValues toString(final SortedNumericDocValues values) {
return toString(new ToStringValues() { return toString(new ToStringValues() {
@Override @Override
public void get(int docID, List<CharSequence> list) { public boolean advanceExact(int doc) throws IOException {
values.setDocument(docID); return values.advanceExact(doc);
for (int i = 0, count = values.count(); i < count; ++i) { }
list.add(Long.toString(values.valueAt(i))); @Override
public void get(List<CharSequence> list) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; ++i) {
list.add(Long.toString(values.nextValue()));
} }
} }
}); });
@ -376,10 +397,13 @@ public enum FieldData {
public static SortedBinaryDocValues toString(final SortedNumericDoubleValues values) { public static SortedBinaryDocValues toString(final SortedNumericDoubleValues values) {
return toString(new ToStringValues() { return toString(new ToStringValues() {
@Override @Override
public void get(int docID, List<CharSequence> list) { public boolean advanceExact(int doc) throws IOException {
values.setDocument(docID); return values.advanceExact(doc);
for (int i = 0, count = values.count(); i < count; ++i) { }
list.add(Double.toString(values.valueAt(i))); @Override
public void get(List<CharSequence> list) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; ++i) {
list.add(Double.toString(values.nextValue()));
} }
} }
}); });
@ -390,23 +414,37 @@ public enum FieldData {
* typically used for scripts or for the `map` execution mode of terms aggs. * typically used for scripts or for the `map` execution mode of terms aggs.
* NOTE: this is slow! * NOTE: this is slow!
*/ */
public static SortedBinaryDocValues toString(final RandomAccessOrds values) { public static SortedBinaryDocValues toString(final SortedSetDocValues values) {
return new SortedBinaryDocValues() { return new SortedBinaryDocValues() {
private int count = 0;
@Override @Override
public BytesRef valueAt(int index) { public boolean advanceExact(int doc) throws IOException {
return values.lookupOrd(values.ordAt(index)); if (values.advanceExact(doc) == false) {
return false;
}
for (int i = 0; ; ++i) {
if (values.nextOrd() == SortedSetDocValues.NO_MORE_ORDS) {
count = i;
break;
}
}
// reset the iterator on the current doc
boolean advanced = values.advanceExact(doc);
assert advanced;
return true;
} }
@Override @Override
public void setDocument(int docId) { public int docValueCount() {
values.setDocument(docId); return count;
} }
@Override @Override
public int count() { public BytesRef nextValue() throws IOException {
return values.cardinality(); return values.lookupOrd(values.nextOrd());
} }
}; };
} }
@ -418,78 +456,30 @@ public enum FieldData {
public static SortedBinaryDocValues toString(final MultiGeoPointValues values) { public static SortedBinaryDocValues toString(final MultiGeoPointValues values) {
return toString(new ToStringValues() { return toString(new ToStringValues() {
@Override @Override
public void get(int docID, List<CharSequence> list) { public boolean advanceExact(int doc) throws IOException {
values.setDocument(docID); return values.advanceExact(doc);
for (int i = 0, count = values.count(); i < count; ++i) { }
list.add(values.valueAt(i).toString()); @Override
public void get(List<CharSequence> list) throws IOException {
for (int i = 0, count = values.docValueCount(); i < count; ++i) {
list.add(values.nextValue().toString());
} }
} }
}); });
} }
/**
* If <code>dv</code> is an instance of {@link RandomAccessOrds}, then return
* it, otherwise wrap it into a slow wrapper that implements random access.
*/
public static RandomAccessOrds maybeSlowRandomAccessOrds(final SortedSetDocValues dv) {
if (dv instanceof RandomAccessOrds) {
return (RandomAccessOrds) dv;
} else {
assert DocValues.unwrapSingleton(dv) == null : "this method expect singleton to return random-access ords";
return new RandomAccessOrds() {
int cardinality;
long[] ords = new long[0];
int ord;
@Override
public void setDocument(int docID) {
cardinality = 0;
dv.setDocument(docID);
for (long ord = dv.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = dv.nextOrd()) {
ords = ArrayUtil.grow(ords, cardinality + 1);
ords[cardinality++] = ord;
}
ord = 0;
}
@Override
public long nextOrd() {
return ords[ord++];
}
@Override
public BytesRef lookupOrd(long ord) {
return dv.lookupOrd(ord);
}
@Override
public long getValueCount() {
return dv.getValueCount();
}
@Override
public long ordAt(int index) {
return ords[index];
}
@Override
public int cardinality() {
return cardinality;
}
};
}
}
private static SortedBinaryDocValues toString(final ToStringValues toStringValues) { private static SortedBinaryDocValues toString(final ToStringValues toStringValues) {
return new SortingBinaryDocValues() { return new SortingBinaryDocValues() {
final List<CharSequence> list = new ArrayList<>(); final List<CharSequence> list = new ArrayList<>();
@Override @Override
public void setDocument(int docID) { public boolean advanceExact(int docID) throws IOException {
if (toStringValues.advanceExact(docID) == false) {
return false;
}
list.clear(); list.clear();
toStringValues.get(docID, list); toStringValues.get(list);
count = list.size(); count = list.size();
grow(); grow();
for (int i = 0; i < count; ++i) { for (int i = 0; i < count; ++i) {
@ -497,6 +487,7 @@ public enum FieldData {
values[i].copyChars(s); values[i].copyChars(s);
} }
sort(); sort();
return true;
} }
}; };
@ -504,7 +495,14 @@ public enum FieldData {
private interface ToStringValues { private interface ToStringValues {
void get(int docID, List<CharSequence> values); /**
* Advance this instance to the given document id
* @return true if there is a value for this document
*/
boolean advanceExact(int doc) throws IOException;
/** Fill the list of charsquences with the list of values for the current document. */
void get(List<CharSequence> values) throws IOException;
} }
@ -517,8 +515,13 @@ public enum FieldData {
} }
@Override @Override
public double get(int docID) { public double doubleValue() throws IOException {
return values.get(docID); return values.longValue();
}
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
} }
} }
@ -532,38 +535,49 @@ public enum FieldData {
} }
@Override @Override
public double valueAt(int index) { public boolean advanceExact(int target) throws IOException {
return values.valueAt(index); return values.advanceExact(target);
} }
@Override @Override
public void setDocument(int doc) { public double nextValue() throws IOException {
values.setDocument(doc); return values.nextValue();
} }
@Override @Override
public int count() { public int docValueCount() {
return values.count(); return values.docValueCount();
} }
} }
private static class LongCastedValues extends NumericDocValues { private static class LongCastedValues extends AbstractNumericDocValues {
private final NumericDoubleValues values; private final NumericDoubleValues values;
private int docID = -1;
LongCastedValues(NumericDoubleValues values) { LongCastedValues(NumericDoubleValues values) {
this.values = values; this.values = values;
} }
@Override @Override
public long get(int docID) { public boolean advanceExact(int target) throws IOException {
return (long) values.get(docID); docID = target;
return values.advanceExact(target);
} }
@Override
public long longValue() throws IOException {
return (long) values.doubleValue();
}
@Override
public int docID() {
return docID;
}
} }
private static class SortedLongCastedValues extends SortedNumericDocValues { private static class SortedLongCastedValues extends AbstractSortedNumericDocValues {
private final SortedNumericDoubleValues values; private final SortedNumericDoubleValues values;
@ -572,18 +586,18 @@ public enum FieldData {
} }
@Override @Override
public long valueAt(int index) { public boolean advanceExact(int target) throws IOException {
return (long) values.valueAt(index); return values.advanceExact(target);
} }
@Override @Override
public void setDocument(int doc) { public int docValueCount() {
values.setDocument(doc); return values.docValueCount();
} }
@Override @Override
public int count() { public long nextValue() throws IOException {
return values.count(); return (long) values.nextValue();
} }
} }

View file

@ -21,17 +21,23 @@ package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
/** /**
* Per-document geo-point values. * Per-document geo-point values.
*/ */
public abstract class GeoPointValues { public abstract class GeoPointValues {
/** /**
* Get the {@link GeoPoint} associated with <code>docID</code>. * Advance this instance to the given document id
* The returned {@link GeoPoint} might be reused across calls. * @return true if there is a value for this document
* If the given <code>docID</code> does not have a value then the returned
* geo point mught have both latitude and longitude set to 0.
*/ */
public abstract GeoPoint get(int docID); public abstract boolean advanceExact(int doc) throws IOException;
/**
* Get the {@link GeoPoint} associated with the current document.
* The returned {@link GeoPoint} might be reused across calls.
*/
public abstract GeoPoint geoPointValue();
} }

View file

@ -20,6 +20,8 @@ package org.elasticsearch.index.fielddata;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
/** /**
* A stateful lightweight per document set of {@link GeoPoint} values. * A stateful lightweight per document set of {@link GeoPoint} values.
* To iterate over values in a document use the following pattern: * To iterate over values in a document use the following pattern:
@ -44,28 +46,24 @@ public abstract class MultiGeoPointValues {
} }
/** /**
* Sets iteration to the specified docID. * Advance this instance to the given document id
* @param docId document ID * @return true if there is a value for this document
*
* @see #valueAt(int)
* @see #count()
*/ */
public abstract void setDocument(int docId); public abstract boolean advanceExact(int doc) throws IOException;
/** /**
* Return the number of geo points the current document has. * Return the number of geo points the current document has.
*/ */
public abstract int count(); public abstract int docValueCount();
/** /**
* Return the <code>i-th</code> value associated with the current document. * Return the next value associated with the current document. This must not be
* Behavior is undefined when <code>i</code> is undefined or greater than * called more than {@link #docValueCount()} times.
* or equal to {@link #count()}.
* *
* Note: the returned {@link GeoPoint} might be shared across invocations. * Note: the returned {@link GeoPoint} might be shared across invocations.
* *
* @return the next value for the current docID set to {@link #setDocument(int)}. * @return the next value for the current docID set to {@link #advanceExact(int)}.
*/ */
public abstract GeoPoint valueAt(int i); public abstract GeoPoint nextValue() throws IOException;
} }

View file

@ -20,43 +20,58 @@
package org.elasticsearch.index.fielddata; package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DoubleValues;
import java.io.IOException;
/** /**
* A per-document numeric value. * A per-document numeric value.
*/ */
public abstract class NumericDoubleValues { public abstract class NumericDoubleValues extends DoubleValues {
/** Sole constructor. (For invocation by subclass /** Sole constructor. (For invocation by subclass
* constructors, typically implicit.) */ * constructors, typically implicit.) */
protected NumericDoubleValues() {} protected NumericDoubleValues() {}
/**
* Returns the numeric value for the specified document ID. This must return
* <tt>0d</tt> if the given doc ID has no value.
* @param docID document ID to lookup
* @return numeric value
*/
public abstract double get(int docID);
// TODO: this interaction with sort comparators is really ugly... // TODO: this interaction with sort comparators is really ugly...
/** Returns numeric docvalues view of raw double bits */ /** Returns numeric docvalues view of raw double bits */
public NumericDocValues getRawDoubleValues() { public NumericDocValues getRawDoubleValues() {
return new NumericDocValues() { return new AbstractNumericDocValues() {
@Override private int docID = -1;
public long get(int docID) { @Override
return Double.doubleToRawLongBits(NumericDoubleValues.this.get(docID)); public boolean advanceExact(int target) throws IOException {
} docID = target;
return NumericDoubleValues.this.advanceExact(target);
}
@Override
public long longValue() throws IOException {
return Double.doubleToRawLongBits(NumericDoubleValues.this.doubleValue());
}
@Override
public int docID() {
return docID;
}
}; };
} }
// yes... this is doing what the previous code was doing... // yes... this is doing what the previous code was doing...
/** Returns numeric docvalues view of raw float bits */ /** Returns numeric docvalues view of raw float bits */
public NumericDocValues getRawFloatValues() { public NumericDocValues getRawFloatValues() {
return new NumericDocValues() { return new AbstractNumericDocValues() {
@Override private int docID = -1;
public long get(int docID) { @Override
return Float.floatToRawIntBits((float)NumericDoubleValues.this.get(docID)); public boolean advanceExact(int target) throws IOException {
} docID = target;
return NumericDoubleValues.this.advanceExact(target);
}
@Override
public long longValue() throws IOException {
return Float.floatToRawIntBits((float)NumericDoubleValues.this.doubleValue());
}
@Override
public int docID() {
return docID;
}
}; };
} }
} }

View file

@ -21,7 +21,9 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.GeoUtils;
@ -32,7 +34,9 @@ import org.joda.time.DateTimeZone;
import org.joda.time.MutableDateTime; import org.joda.time.MutableDateTime;
import org.joda.time.ReadableDateTime; import org.joda.time.ReadableDateTime;
import java.io.IOException;
import java.util.AbstractList; import java.util.AbstractList;
import java.util.Arrays;
import java.util.Comparator; import java.util.Comparator;
import java.util.List; import java.util.List;
import java.util.function.UnaryOperator; import java.util.function.UnaryOperator;
@ -46,7 +50,7 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
/** /**
* Set the current doc ID. * Set the current doc ID.
*/ */
public abstract void setNextDocId(int docId); public abstract void setNextDocId(int docId) throws IOException;
/** /**
* Return a copy of the list of the values for the current document. * Return a copy of the list of the values for the current document.
@ -83,24 +87,48 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Strings extends ScriptDocValues<String> { public static final class Strings extends ScriptDocValues<String> {
private final SortedBinaryDocValues values; private final SortedBinaryDocValues in;
private BytesRefBuilder[] values = new BytesRefBuilder[0];
private int count;
public Strings(SortedBinaryDocValues values) { public Strings(SortedBinaryDocValues in) {
this.values = values; this.in = in;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i].copyBytes(in.nextValue());
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
if (newSize > values.length) {
final int oldLength = values.length;
values = ArrayUtil.grow(values, count);
for (int i = oldLength; i < values.length; ++i) {
values[i] = new BytesRefBuilder();
}
}
} }
public SortedBinaryDocValues getInternalValues() { public SortedBinaryDocValues getInternalValues() {
return this.values; return this.in;
} }
public BytesRef getBytesValue() { public BytesRef getBytesValue() {
if (values.count() > 0) { if (size() > 0) {
return values.valueAt(0); return values[0].get();
} else { } else {
return null; return null;
} }
@ -117,12 +145,12 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
@Override @Override
public String get(int index) { public String get(int index) {
return values.valueAt(index).utf8ToString(); return values[index].get().utf8ToString();
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
} }
} }
@ -130,61 +158,81 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Longs extends ScriptDocValues<Long> { public static final class Longs extends ScriptDocValues<Long> {
protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Longs.class)); protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Longs.class));
private final SortedNumericDocValues values; private final SortedNumericDocValues in;
private long[] values = new long[0];
private int count;
private Dates dates; private Dates dates;
private int docId = -1;
public Longs(SortedNumericDocValues in) {
this.in = in;
public Longs(SortedNumericDocValues values) {
this.values = values;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); this.docId = docId;
if (dates != null) { if (in.advanceExact(docId)) {
dates.refreshArray(); resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue();
}
} else {
resize(0);
} }
if (dates != null) {
dates.setNextDocId(docId);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
values = ArrayUtil.grow(values, count);
} }
public SortedNumericDocValues getInternalValues() { public SortedNumericDocValues getInternalValues() {
return this.values; return this.in;
} }
public long getValue() { public long getValue() {
int numValues = values.count(); if (count == 0) {
if (numValues == 0) {
return 0L; return 0L;
} }
return values.valueAt(0); return values[0];
} }
@Deprecated @Deprecated
public ReadableDateTime getDate() { public ReadableDateTime getDate() throws IOException {
deprecationLogger.deprecated("getDate on numeric fields is deprecated. Use a date field to get dates."); deprecationLogger.deprecated("getDate on numeric fields is deprecated. Use a date field to get dates.");
if (dates == null) { if (dates == null) {
dates = new Dates(values); dates = new Dates(in);
dates.refreshArray(); dates.setNextDocId(docId);
} }
return dates.getValue(); return dates.getValue();
} }
@Deprecated @Deprecated
public List<ReadableDateTime> getDates() { public List<ReadableDateTime> getDates() throws IOException {
deprecationLogger.deprecated("getDates on numeric fields is deprecated. Use a date field to get dates."); deprecationLogger.deprecated("getDates on numeric fields is deprecated. Use a date field to get dates.");
if (dates == null) { if (dates == null) {
dates = new Dates(values); dates = new Dates(in);
dates.refreshArray(); dates.setNextDocId(docId);
} }
return dates; return dates;
} }
@Override @Override
public Long get(int index) { public Long get(int index) {
return values.valueAt(index); return values[index];
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
} }
} }
@ -193,22 +241,24 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC);
private final SortedNumericDocValues values; private final SortedNumericDocValues in;
/** /**
* Values wrapped in {@link MutableDateTime}. Null by default an allocated on first usage so we allocate a reasonably size. We keep * Values wrapped in {@link MutableDateTime}. Null by default an allocated on first usage so we allocate a reasonably size. We keep
* this array so we don't have allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document. * this array so we don't have allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document.
*/ */
private MutableDateTime[] dates; private MutableDateTime[] dates;
private int count;
public Dates(SortedNumericDocValues values) { public Dates(SortedNumericDocValues in) {
this.values = values; this.in = in;
} }
/** /**
* Fetch the first field value or 0 millis after epoch if there are no values. * Fetch the first field value or 0 millis after epoch if there are no
* in.
*/ */
public ReadableDateTime getValue() { public ReadableDateTime getValue() {
if (values.count() == 0) { if (count == 0) {
return EPOCH; return EPOCH;
} }
return get(0); return get(0);
@ -234,113 +284,159 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
@Override @Override
public ReadableDateTime get(int index) { public ReadableDateTime get(int index) {
if (index >= values.count()) { if (index >= count) {
throw new IndexOutOfBoundsException( throw new IndexOutOfBoundsException(
"attempted to fetch the [" + index + "] date when there are only [" + values.count() + "] dates."); "attempted to fetch the [" + index + "] date when there are only ["
+ count + "] dates.");
} }
return dates[index]; return dates[index];
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); if (in.advanceExact(docId)) {
count = in.docValueCount();
} else {
count = 0;
}
refreshArray(); refreshArray();
} }
/** /**
* Refresh the backing array. Package private so it can be called when {@link Longs} loads dates. * Refresh the backing array. Package private so it can be called when {@link Longs} loads dates.
*/ */
void refreshArray() { void refreshArray() throws IOException {
if (values.count() == 0) { if (count == 0) {
return; return;
} }
if (dates == null) { if (dates == null) {
// Happens for the document. We delay allocating dates so we can allocate it with a reasonable size. // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size.
dates = new MutableDateTime[values.count()]; dates = new MutableDateTime[count];
for (int i = 0; i < dates.length; i++) { for (int i = 0; i < dates.length; i++) {
dates[i] = new MutableDateTime(values.valueAt(i), DateTimeZone.UTC); dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
} }
return; return;
} }
if (values.count() > dates.length) { if (count > dates.length) {
// Happens when we move to a new document and it has more dates than any documents before it. // Happens when we move to a new document and it has more dates than any documents before it.
MutableDateTime[] backup = dates; MutableDateTime[] backup = dates;
dates = new MutableDateTime[values.count()]; dates = new MutableDateTime[count];
System.arraycopy(backup, 0, dates, 0, backup.length); System.arraycopy(backup, 0, dates, 0, backup.length);
for (int i = 0; i < backup.length; i++) { for (int i = 0; i < backup.length; i++) {
dates[i].setMillis(values.valueAt(i)); dates[i].setMillis(in.nextValue());
} }
for (int i = backup.length; i < dates.length; i++) { for (int i = backup.length; i < dates.length; i++) {
dates[i] = new MutableDateTime(values.valueAt(i), DateTimeZone.UTC); dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC);
} }
return; return;
} }
for (int i = 0; i < values.count(); i++) { for (int i = 0; i < count; i++) {
dates[i].setMillis(values.valueAt(i)); dates[i].setMillis(in.nextValue());
} }
} }
} }
public static final class Doubles extends ScriptDocValues<Double> { public static final class Doubles extends ScriptDocValues<Double> {
private final SortedNumericDoubleValues values; private final SortedNumericDoubleValues in;
private double[] values = new double[0];
private int count;
public Doubles(SortedNumericDoubleValues values) { public Doubles(SortedNumericDoubleValues in) {
this.values = values; this.in = in;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue();
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
values = ArrayUtil.grow(values, count);
} }
public SortedNumericDoubleValues getInternalValues() { public SortedNumericDoubleValues getInternalValues() {
return this.values; return this.in;
} }
public double getValue() { public double getValue() {
int numValues = values.count(); if (count == 0) {
if (numValues == 0) {
return 0d; return 0d;
} }
return values.valueAt(0); return values[0];
} }
@Override @Override
public Double get(int index) { public Double get(int index) {
return values.valueAt(index); return values[index];
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
} }
} }
public static final class GeoPoints extends ScriptDocValues<GeoPoint> { public static final class GeoPoints extends ScriptDocValues<GeoPoint> {
private final MultiGeoPointValues values; private final MultiGeoPointValues in;
private GeoPoint[] values = new GeoPoint[0];
private int count;
public GeoPoints(MultiGeoPointValues values) { public GeoPoints(MultiGeoPointValues in) {
this.values = values; this.in = in;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
GeoPoint point = in.nextValue();
values[i].reset(point.lat(), point.lon());
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
if (newSize > values.length) {
int oldLength = values.length;
values = ArrayUtil.grow(values, count);
for (int i = oldLength; i < values.length; ++i) {
values[i] = new GeoPoint();
}
}
} }
public GeoPoint getValue() { public GeoPoint getValue() {
int numValues = values.count(); if (count == 0) {
if (numValues == 0) {
return null; return null;
} }
return values.valueAt(0); return values[0];
} }
public double getLat() { public double getLat() {
@ -371,13 +467,13 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
@Override @Override
public GeoPoint get(int index) { public GeoPoint get(int index) {
final GeoPoint point = values.valueAt(index); final GeoPoint point = values[index];
return new GeoPoint(point.lat(), point.lon()); return new GeoPoint(point.lat(), point.lon());
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
} }
public double arcDistance(double lat, double lon) { public double arcDistance(double lat, double lon) {
@ -420,66 +516,114 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
public static final class Booleans extends ScriptDocValues<Boolean> { public static final class Booleans extends ScriptDocValues<Boolean> {
private final SortedNumericDocValues values; private final SortedNumericDocValues in;
private boolean[] values = new boolean[0];
private int count;
public Booleans(SortedNumericDocValues values) { public Booleans(SortedNumericDocValues in) {
this.values = values; this.in = in;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue() == 1;
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
values = grow(values, count);
} }
public boolean getValue() { public boolean getValue() {
return values.count() != 0 && values.valueAt(0) == 1; return count != 0 && values[0];
} }
@Override @Override
public Boolean get(int index) { public Boolean get(int index) {
return values.valueAt(index) == 1; return values[index];
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
}
private static boolean[] grow(boolean[] array, int minSize) {
assert minSize >= 0 : "size must be positive (got " + minSize
+ "): likely integer overflow?";
if (array.length < minSize) {
return Arrays.copyOf(array, ArrayUtil.oversize(minSize, 1));
} else
return array;
} }
} }
public static final class BytesRefs extends ScriptDocValues<BytesRef> { public static final class BytesRefs extends ScriptDocValues<BytesRef> {
private final SortedBinaryDocValues values; private final SortedBinaryDocValues in;
private BytesRef[] values;
private int count;
public BytesRefs(SortedBinaryDocValues values) { public BytesRefs(SortedBinaryDocValues in) {
this.values = values; this.in = in;
} }
@Override @Override
public void setNextDocId(int docId) { public void setNextDocId(int docId) throws IOException {
values.setDocument(docId); if (in.advanceExact(docId)) {
resize(in.docValueCount());
for (int i = 0; i < count; i++) {
values[i] = in.nextValue();
}
} else {
resize(0);
}
}
/**
* Set the {@link #size()} and ensure that the {@link #values} array can
* store at least that many entries.
*/
protected void resize(int newSize) {
count = newSize;
if (values == null) {
values = new BytesRef[newSize];
} else {
values = ArrayUtil.grow(values, count);
}
} }
public SortedBinaryDocValues getInternalValues() { public SortedBinaryDocValues getInternalValues() {
return this.values; return this.in;
} }
public BytesRef getValue() { public BytesRef getValue() {
int numValues = values.count(); if (count == 0) {
if (numValues == 0) {
return new BytesRef(); return new BytesRef();
} }
return values.valueAt(0); return values[0];
} }
@Override @Override
public BytesRef get(int index) { public BytesRef get(int index) {
return values.valueAt(index); return values[index];
} }
@Override @Override
public int size() { public int size() {
return values.count(); return count;
} }
} }
} }

View file

@ -19,48 +19,34 @@
package org.elasticsearch.index.fielddata; package org.elasticsearch.index.fielddata;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import java.io.IOException;
final class SingletonMultiGeoPointValues extends MultiGeoPointValues { final class SingletonMultiGeoPointValues extends MultiGeoPointValues {
private final GeoPointValues in; private final GeoPointValues in;
private final Bits docsWithField;
private GeoPoint value;
private int count;
SingletonMultiGeoPointValues(GeoPointValues in, Bits docsWithField) { SingletonMultiGeoPointValues(GeoPointValues in) {
this.in = in; this.in = in;
this.docsWithField = docsWithField;
} }
@Override @Override
public void setDocument(int docID) { public boolean advanceExact(int doc) throws IOException {
value = in.get(docID); return in.advanceExact(doc);
if (value.lat() == Double.NaN && value.lon() == Double.NaN || (docsWithField != null && !docsWithField.get(docID))) {
count = 0;
} else {
count = 1;
}
} }
@Override @Override
public int count() { public int docValueCount() {
return count; return 1;
} }
@Override @Override
public GeoPoint valueAt(int index) { public GeoPoint nextValue() {
assert index == 0; return in.geoPointValue();
return value;
} }
public GeoPointValues getGeoPointValues() { GeoPointValues getGeoPointValues() {
return in; return in;
} }
public Bits getDocsWithField() {
return docsWithField;
}
} }

View file

@ -20,49 +20,35 @@
package org.elasticsearch.index.fielddata; package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.Bits.MatchAllBits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import java.io.IOException;
final class SingletonSortedBinaryDocValues extends SortedBinaryDocValues { final class SingletonSortedBinaryDocValues extends SortedBinaryDocValues {
private final BinaryDocValues in; private final BinaryDocValues in;
private final Bits docsWithField;
private BytesRef value;
private int count;
SingletonSortedBinaryDocValues(BinaryDocValues in, Bits docsWithField) { SingletonSortedBinaryDocValues(BinaryDocValues in) {
this.in = in; this.in = in;
this.docsWithField = docsWithField instanceof MatchAllBits ? null : docsWithField;
} }
@Override @Override
public void setDocument(int docID) { public boolean advanceExact(int doc) throws IOException {
value = in.get(docID); return in.advanceExact(doc);
if (value.length == 0 && docsWithField != null && !docsWithField.get(docID)) {
count = 0;
} else {
count = 1;
}
} }
@Override @Override
public int count() { public int docValueCount() {
return count; return 1;
} }
@Override @Override
public BytesRef valueAt(int index) { public BytesRef nextValue() throws IOException {
assert index == 0; return in.binaryValue();
return value;
} }
public BinaryDocValues getBinaryDocValues() { public BinaryDocValues getBinaryDocValues() {
return in; return in;
} }
public Bits getDocsWithField() {
return docsWithField;
}
} }

View file

@ -19,8 +19,7 @@
package org.elasticsearch.index.fielddata; package org.elasticsearch.index.fielddata;
import org.apache.lucene.util.Bits; import java.io.IOException;
import org.apache.lucene.util.Bits.MatchAllBits;
/** /**
* Exposes multi-valued view over a single-valued instance. * Exposes multi-valued view over a single-valued instance.
@ -29,43 +28,30 @@ import org.apache.lucene.util.Bits.MatchAllBits;
* that works for single or multi-valued types. * that works for single or multi-valued types.
*/ */
final class SingletonSortedNumericDoubleValues extends SortedNumericDoubleValues { final class SingletonSortedNumericDoubleValues extends SortedNumericDoubleValues {
private final NumericDoubleValues in; private final NumericDoubleValues in;
private final Bits docsWithField;
private double value;
private int count;
SingletonSortedNumericDoubleValues(NumericDoubleValues in, Bits docsWithField) { SingletonSortedNumericDoubleValues(NumericDoubleValues in) {
this.in = in; this.in = in;
this.docsWithField = docsWithField instanceof MatchAllBits ? null : docsWithField;
}
/** Return the wrapped {@link NumericDoubleValues} */
public NumericDoubleValues getNumericDoubleValues() {
return in;
}
/** Return the wrapped {@link Bits} */
public Bits getDocsWithField() {
return docsWithField;
}
@Override
public void setDocument(int doc) {
value = in.get(doc);
if (docsWithField != null && value == 0 && docsWithField.get(doc) == false) {
count = 0;
} else {
count = 1;
} }
}
@Override /** Return the wrapped {@link NumericDoubleValues} */
public double valueAt(int index) { public NumericDoubleValues getNumericDoubleValues() {
return value; return in;
} }
@Override
public boolean advanceExact(int target) throws IOException {
return in.advanceExact(target);
}
@Override
public int docValueCount() {
return 1;
}
@Override
public double nextValue() throws IOException {
return in.doubleValue();
}
@Override
public int count() {
return count;
}
} }

View file

@ -22,13 +22,16 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/** /**
* {@link NumericDocValues} instance that wraps a {@link NumericDoubleValues} * {@link NumericDocValues} instance that wraps a {@link NumericDoubleValues}
* and converts the doubles to sortable long bits using * and converts the doubles to sortable long bits using
* {@link NumericUtils#doubleToSortableLong(double)}. * {@link NumericUtils#doubleToSortableLong(double)}.
*/ */
final class SortableLongBitsNumericDocValues extends NumericDocValues { final class SortableLongBitsNumericDocValues extends AbstractNumericDocValues {
private int docID = -1;
private final NumericDoubleValues values; private final NumericDoubleValues values;
SortableLongBitsNumericDocValues(NumericDoubleValues values) { SortableLongBitsNumericDocValues(NumericDoubleValues values) {
@ -36,8 +39,19 @@ final class SortableLongBitsNumericDocValues extends NumericDocValues {
} }
@Override @Override
public long get(int docID) { public long longValue() throws IOException {
return NumericUtils.doubleToSortableLong(values.get(docID)); return NumericUtils.doubleToSortableLong(values.doubleValue());
}
@Override
public boolean advanceExact(int target) throws IOException {
docID = target;
return values.advanceExact(target);
}
@Override
public int docID() {
return docID;
} }
/** Return the wrapped values. */ /** Return the wrapped values. */

View file

@ -22,12 +22,14 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/** /**
* {@link SortedNumericDocValues} instance that wraps a {@link SortedNumericDoubleValues} * {@link SortedNumericDocValues} instance that wraps a {@link SortedNumericDoubleValues}
* and converts the doubles to sortable long bits using * and converts the doubles to sortable long bits using
* {@link NumericUtils#doubleToSortableLong(double)}. * {@link NumericUtils#doubleToSortableLong(double)}.
*/ */
final class SortableLongBitsSortedNumericDocValues extends SortedNumericDocValues { final class SortableLongBitsSortedNumericDocValues extends AbstractSortedNumericDocValues {
private final SortedNumericDoubleValues values; private final SortedNumericDoubleValues values;
@ -36,18 +38,18 @@ final class SortableLongBitsSortedNumericDocValues extends SortedNumericDocValue
} }
@Override @Override
public void setDocument(int doc) { public boolean advanceExact(int target) throws IOException {
values.setDocument(doc); return values.advanceExact(target);
} }
@Override @Override
public long valueAt(int index) { public long nextValue() throws IOException {
return NumericUtils.doubleToSortableLong(values.valueAt(index)); return NumericUtils.doubleToSortableLong(values.nextValue());
} }
@Override @Override
public int count() { public int docValueCount() {
return values.count(); return values.docValueCount();
} }
/** Return the wrapped values. */ /** Return the wrapped values. */

View file

@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/** /**
* {@link NumericDoubleValues} instance that wraps a {@link NumericDocValues} * {@link NumericDoubleValues} instance that wraps a {@link NumericDocValues}
* and converts the doubles to sortable long bits using * and converts the doubles to sortable long bits using
@ -36,8 +38,13 @@ final class SortableLongBitsToNumericDoubleValues extends NumericDoubleValues {
} }
@Override @Override
public double get(int docID) { public double doubleValue() throws IOException {
return NumericUtils.sortableLongToDouble(values.get(docID)); return NumericUtils.sortableLongToDouble(values.longValue());
}
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
} }
/** Return the wrapped values. */ /** Return the wrapped values. */

View file

@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import java.io.IOException;
/** /**
* {@link SortedNumericDoubleValues} instance that wraps a {@link SortedNumericDocValues} * {@link SortedNumericDoubleValues} instance that wraps a {@link SortedNumericDocValues}
* and converts the doubles to sortable long bits using * and converts the doubles to sortable long bits using
@ -36,18 +38,18 @@ final class SortableLongBitsToSortedNumericDoubleValues extends SortedNumericDou
} }
@Override @Override
public void setDocument(int doc) { public boolean advanceExact(int target) throws IOException {
values.setDocument(doc); return values.advanceExact(target);
} }
@Override @Override
public double valueAt(int index) { public double nextValue() throws IOException {
return NumericUtils.sortableLongToDouble(values.valueAt(index)); return NumericUtils.sortableLongToDouble(values.nextValue());
} }
@Override @Override
public int count() { public int docValueCount() {
return values.count(); return values.docValueCount();
} }
/** Return the wrapped values. */ /** Return the wrapped values. */

View file

@ -21,28 +21,35 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import java.io.IOException;
/** /**
* A list of per-document binary values, sorted * A list of per-document binary values, sorted
* according to {@link BytesRef#compareTo(BytesRef)}. * according to {@link BytesRef#compareTo(BytesRef)}.
* There might be dups however. * There might be dups however.
*/ */
// TODO: Should it expose a count (current approach) or return null when there are no more values?
public abstract class SortedBinaryDocValues { public abstract class SortedBinaryDocValues {
/** /**
* Positions to the specified document * Advance this instance to the given document id
* @return true if there is a value for this document
*/ */
public abstract void setDocument(int docId); public abstract boolean advanceExact(int doc) throws IOException;
/** /**
* Return the number of values of the current document. * Retrieves the number of values for the current document. This must always
* be greater than zero.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
*/ */
public abstract int count(); public abstract int docValueCount();
/** /**
* Retrieve the value for the current document at the specified index. * Iterates to the next value in the current document. Do not call this more than
* An index ranges from {@code 0} to {@code count()-1}. * {@link #docValueCount} times for the document.
* Note that the returned {@link BytesRef} might be reused across invocations. * Note that the returned {@link BytesRef} might be reused across invocations.
*/ */
public abstract BytesRef valueAt(int index); public abstract BytesRef nextValue() throws IOException;
} }

View file

@ -21,6 +21,8 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedNumericDocValues;
import java.io.IOException;
/** /**
* Clone of {@link SortedNumericDocValues} for double values. * Clone of {@link SortedNumericDocValues} for double values.
*/ */
@ -30,21 +32,25 @@ public abstract class SortedNumericDoubleValues {
* constructors, typically implicit.) */ * constructors, typically implicit.) */
protected SortedNumericDoubleValues() {} protected SortedNumericDoubleValues() {}
/** /** Advance the iterator to exactly {@code target} and return whether
* Positions to the specified document * {@code target} has a value.
*/ * {@code target} must be greater than or equal to the current
public abstract void setDocument(int doc); * doc ID and must be a valid doc ID, ie. &ge; 0 and
* &lt; {@code maxDoc}.*/
public abstract boolean advanceExact(int target) throws IOException;
/** /**
* Retrieve the value for the current document at the specified index. * Iterates to the next value in the current document. Do not call this more than
* An index ranges from {@code 0} to {@code count()-1}. * {@link #docValueCount} times for the document.
*/ */
public abstract double valueAt(int index); public abstract double nextValue() throws IOException;
/** /**
* Retrieves the count of values for the current document. * Retrieves the number of values for the current document. This must always
* This may be zero if a document has no values. * be greater than zero.
* It is illegal to call this method after {@link #advanceExact(int)}
* returned {@code false}.
*/ */
public abstract int count(); public abstract int docValueCount();
} }

View file

@ -33,6 +33,7 @@ import java.util.Arrays;
*/ */
public abstract class SortingBinaryDocValues extends SortedBinaryDocValues { public abstract class SortingBinaryDocValues extends SortedBinaryDocValues {
private int index;
protected int count; protected int count;
protected BytesRefBuilder[] values; protected BytesRefBuilder[] values;
private final Sorter sorter; private final Sorter sorter;
@ -73,15 +74,17 @@ public abstract class SortingBinaryDocValues extends SortedBinaryDocValues {
*/ */
protected final void sort() { protected final void sort() {
sorter.sort(0, count); sorter.sort(0, count);
index = 0;
} }
@Override @Override
public final int count() { public int docValueCount() {
return count; return count;
} }
@Override @Override
public final BytesRef valueAt(int index) { public final BytesRef nextValue() {
return values[index].get(); assert index < count;
return values[index++].get();
} }
} }

View file

@ -31,10 +31,12 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues {
private int count; private int count;
protected long[] values; protected long[] values;
protected int valuesCursor;
private final Sorter sorter; private final Sorter sorter;
protected SortingNumericDocValues() { protected SortingNumericDocValues() {
values = new long[1]; values = new long[1];
valuesCursor = 0;
sorter = new InPlaceMergeSorter() { sorter = new InPlaceMergeSorter() {
@Override @Override
@ -52,12 +54,13 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues {
} }
/** /**
* Set the {@link #count()} and ensure that the {@link #values} array can * Set the {@link #docValueCount()} and ensure that the {@link #values} array can
* store at least that many entries. * store at least that many entries.
*/ */
protected final void resize(int newSize) { protected final void resize(int newSize) {
count = newSize; count = newSize;
values = ArrayUtil.grow(values, count); values = ArrayUtil.grow(values, count);
valuesCursor = 0;
} }
/** /**
@ -69,12 +72,12 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues {
} }
@Override @Override
public final int count() { public final int docValueCount() {
return count; return count;
} }
@Override @Override
public final long valueAt(int index) { public final long nextValue() {
return values[index]; return values[valuesCursor++];
} }
} }

View file

@ -29,11 +29,13 @@ import org.apache.lucene.util.Sorter;
public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValues { public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValues {
private int count; private int count;
private int valuesCursor;
protected double[] values; protected double[] values;
private final Sorter sorter; private final Sorter sorter;
protected SortingNumericDoubleValues() { protected SortingNumericDoubleValues() {
values = new double[1]; values = new double[1];
valuesCursor = 0;
sorter = new InPlaceMergeSorter() { sorter = new InPlaceMergeSorter() {
@Override @Override
@ -51,29 +53,30 @@ public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValu
} }
/** /**
* Set the {@link #count()} and ensure that the {@link #values} array can * Set the {@link #docValueCount()} and ensure that the {@link #values} array can
* store at least that many entries. * store at least that many entries.
*/ */
protected final void resize(int newSize) { protected final void resize(int newSize) {
count = newSize; count = newSize;
values = ArrayUtil.grow(values, count); values = ArrayUtil.grow(values, count);
valuesCursor = 0;
} }
/** /**
* Sort values that are stored between offsets <code>0</code> and * Sort values that are stored between offsets <code>0</code> and
* {@link #count} of {@link #values}. * {@link #docValueCount} of {@link #values}.
*/ */
protected final void sort() { protected final void sort() {
sorter.sort(0, count); sorter.sort(0, count);
} }
@Override @Override
public final int count() { public final int docValueCount() {
return count; return count;
} }
@Override @Override
public final double valueAt(int index) { public final double nextValue() {
return values[index]; return values[valuesCursor++];
} }
} }

View file

@ -21,15 +21,15 @@ package org.elasticsearch.index.fielddata.fieldcomparator;
import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
@ -89,7 +89,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
@Override @Override
protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException { protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException {
final RandomAccessOrds values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues(); final SortedSetDocValues values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues();
final SortedDocValues selectedValues; final SortedDocValues selectedValues;
if (nested == null) { if (nested == null) {
selectedValues = sortMode.select(values); selectedValues = sortMode.select(values);
@ -113,8 +113,6 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
}; };
} }
final BytesRef nullPlaceHolder = new BytesRef();
final BytesRef nonNullMissingBytes = missingBytes == null ? nullPlaceHolder : missingBytes;
return new FieldComparator.TermValComparator(numHits, null, sortMissingLast) { return new FieldComparator.TermValComparator(numHits, null, sortMissingLast) {
@Override @Override
@ -122,25 +120,15 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
final SortedBinaryDocValues values = getValues(context); final SortedBinaryDocValues values = getValues(context);
final BinaryDocValues selectedValues; final BinaryDocValues selectedValues;
if (nested == null) { if (nested == null) {
selectedValues = sortMode.select(values, nonNullMissingBytes); selectedValues = sortMode.select(values, missingBytes);
} else { } else {
final BitSet rootDocs = nested.rootDocs(context); final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context);
selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc()); selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc());
} }
return selectedValues; return selectedValues;
} }
@Override
protected Bits getDocsWithField(LeafReaderContext context, String field) throws IOException {
return new Bits.MatchAllBits(context.reader().maxDoc());
}
@Override
protected boolean isNull(int doc, BytesRef term) {
return term == nullPlaceHolder;
}
@Override @Override
public void setScorer(Scorer scorer) { public void setScorer(Scorer scorer) {
BytesRefFieldComparatorSource.this.setScorer(scorer); BytesRefFieldComparatorSource.this.setScorer(scorer);
@ -154,13 +142,14 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
* are replaced with the specified term * are replaced with the specified term
*/ */
// TODO: move this out if we need it for other reasons // TODO: move this out if we need it for other reasons
static class ReplaceMissing extends SortedDocValues { static class ReplaceMissing extends AbstractSortedDocValues {
final SortedDocValues in; final SortedDocValues in;
final int substituteOrd; final int substituteOrd;
final BytesRef substituteTerm; final BytesRef substituteTerm;
final boolean exists; final boolean exists;
boolean hasValue;
ReplaceMissing(SortedDocValues in, BytesRef term) { ReplaceMissing(SortedDocValues in, BytesRef term) throws IOException {
this.in = in; this.in = in;
this.substituteTerm = term; this.substituteTerm = term;
int sub = in.lookupTerm(term); int sub = in.lookupTerm(term);
@ -174,17 +163,29 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
} }
@Override @Override
public int getOrd(int docID) { public int ordValue() throws IOException {
int ord = in.getOrd(docID); if (hasValue == false) {
if (ord < 0) {
return substituteOrd; return substituteOrd;
} else if (exists == false && ord >= substituteOrd) { }
int ord = in.ordValue();
if (exists == false && ord >= substituteOrd) {
return ord + 1; return ord + 1;
} else { } else {
return ord; return ord;
} }
} }
@Override
public boolean advanceExact(int target) throws IOException {
hasValue = in.advanceExact(target);
return true;
}
@Override
public int docID() {
return in.docID();
}
@Override @Override
public int getValueCount() { public int getValueCount() {
if (exists) { if (exists) {
@ -195,7 +196,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
} }
@Override @Override
public BytesRef lookupOrd(int ord) { public BytesRef lookupOrd(int ord) throws IOException {
if (ord == substituteOrd) { if (ord == substituteOrd) {
return substituteTerm; return substituteTerm;
} else if (exists == false && ord > substituteOrd) { } else if (exists == false && ord > substituteOrd) {

View file

@ -20,22 +20,23 @@
package org.elasticsearch.index.fielddata.ordinals; package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.MultiDocValues.OrdinalMap; import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongValues; import org.apache.lucene.util.LongValues;
import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds;
import java.io.IOException;
/** /**
* A {@link RandomAccessOrds} implementation that returns ordinals that are global. * A {@link SortedSetDocValues} implementation that returns ordinals that are global.
*/ */
public class GlobalOrdinalMapping extends AbstractRandomAccessOrds { public class GlobalOrdinalMapping extends SortedSetDocValues {
private final RandomAccessOrds values; private final SortedSetDocValues values;
private final OrdinalMap ordinalMap; private final OrdinalMap ordinalMap;
private final LongValues mapping; private final LongValues mapping;
private final RandomAccessOrds[] bytesValues; private final SortedSetDocValues[] bytesValues;
GlobalOrdinalMapping(OrdinalMap ordinalMap, RandomAccessOrds[] bytesValues, int segmentIndex) { GlobalOrdinalMapping(OrdinalMap ordinalMap, SortedSetDocValues[] bytesValues, int segmentIndex) {
super(); super();
this.values = bytesValues[segmentIndex]; this.values = bytesValues[segmentIndex];
this.bytesValues = bytesValues; this.bytesValues = bytesValues;
@ -53,25 +54,45 @@ public class GlobalOrdinalMapping extends AbstractRandomAccessOrds {
} }
@Override @Override
public long ordAt(int index) { public boolean advanceExact(int target) throws IOException {
return getGlobalOrd(values.ordAt(index)); return values.advanceExact(target);
} }
@Override @Override
public void doSetDocument(int docId) { public long nextOrd() throws IOException {
values.setDocument(docId); long segmentOrd = values.nextOrd();
if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) {
return SortedSetDocValues.NO_MORE_ORDS;
} else {
return getGlobalOrd(segmentOrd);
}
} }
@Override @Override
public int cardinality() { public BytesRef lookupOrd(long globalOrd) throws IOException {
return values.cardinality();
}
@Override
public BytesRef lookupOrd(long globalOrd) {
final long segmentOrd = ordinalMap.getFirstSegmentOrd(globalOrd); final long segmentOrd = ordinalMap.getFirstSegmentOrd(globalOrd);
int readerIndex = ordinalMap.getFirstSegmentNumber(globalOrd); int readerIndex = ordinalMap.getFirstSegmentNumber(globalOrd);
return bytesValues[readerIndex].lookupOrd(segmentOrd); return bytesValues[readerIndex].lookupOrd(segmentOrd);
} }
@Override
public int docID() {
return values.docID();
}
@Override
public int nextDoc() throws IOException {
return values.nextDoc();
}
@Override
public int advance(int target) throws IOException {
return values.advance(target);
}
@Override
public long cost() {
return values.cost();
}
} }

View file

@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiDocValues.OrdinalMap; import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreaker;
@ -52,12 +52,12 @@ public enum GlobalOrdinalsBuilder {
*/ */
public static IndexOrdinalsFieldData build(final IndexReader indexReader, IndexOrdinalsFieldData indexFieldData, public static IndexOrdinalsFieldData build(final IndexReader indexReader, IndexOrdinalsFieldData indexFieldData,
IndexSettings indexSettings, CircuitBreakerService breakerService, Logger logger, IndexSettings indexSettings, CircuitBreakerService breakerService, Logger logger,
Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction) throws IOException { Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) throws IOException {
assert indexReader.leaves().size() > 1; assert indexReader.leaves().size() > 1;
long startTimeNS = System.nanoTime(); long startTimeNS = System.nanoTime();
final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()]; final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()];
final RandomAccessOrds[] subs = new RandomAccessOrds[indexReader.leaves().size()]; final SortedSetDocValues[] subs = new SortedSetDocValues[indexReader.leaves().size()];
for (int i = 0; i < indexReader.leaves().size(); ++i) { for (int i = 0; i < indexReader.leaves().size(); ++i) {
atomicFD[i] = indexFieldData.load(indexReader.leaves().get(i)); atomicFD[i] = indexFieldData.load(indexReader.leaves().get(i));
subs[i] = atomicFD[i].getOrdinalsValues(); subs[i] = atomicFD[i].getOrdinalsValues();
@ -83,11 +83,11 @@ public enum GlobalOrdinalsBuilder {
assert indexReader.leaves().size() > 1; assert indexReader.leaves().size() > 1;
final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()]; final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()];
final RandomAccessOrds[] subs = new RandomAccessOrds[indexReader.leaves().size()]; final SortedSetDocValues[] subs = new SortedSetDocValues[indexReader.leaves().size()];
for (int i = 0; i < indexReader.leaves().size(); ++i) { for (int i = 0; i < indexReader.leaves().size(); ++i) {
atomicFD[i] = new AbstractAtomicOrdinalsFieldData(AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION) { atomicFD[i] = new AbstractAtomicOrdinalsFieldData(AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION) {
@Override @Override
public RandomAccessOrds getOrdinalsValues() { public SortedSetDocValues getOrdinalsValues() {
return DocValues.emptySortedSet(); return DocValues.emptySortedSet();
} }

View file

@ -20,7 +20,7 @@ package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.MultiDocValues.OrdinalMap; import org.apache.lucene.index.MultiDocValues.OrdinalMap;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
@ -36,10 +36,10 @@ import java.util.function.Function;
final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData { final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData {
private final Atomic[] atomicReaders; private final Atomic[] atomicReaders;
private final Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction; private final Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction;
InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, AtomicOrdinalsFieldData[] segmentAfd, InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, AtomicOrdinalsFieldData[] segmentAfd,
OrdinalMap ordinalMap, long memorySizeInBytes, Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction) { OrdinalMap ordinalMap, long memorySizeInBytes, Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) {
super(indexSettings, fieldName, memorySizeInBytes); super(indexSettings, fieldName, memorySizeInBytes);
this.atomicReaders = new Atomic[segmentAfd.length]; this.atomicReaders = new Atomic[segmentAfd.length];
for (int i = 0; i < segmentAfd.length; i++) { for (int i = 0; i < segmentAfd.length; i++) {
@ -67,13 +67,13 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel
} }
@Override @Override
public RandomAccessOrds getOrdinalsValues() { public SortedSetDocValues getOrdinalsValues() {
final RandomAccessOrds values = afd.getOrdinalsValues(); final SortedSetDocValues values = afd.getOrdinalsValues();
if (values.getValueCount() == ordinalMap.getValueCount()) { if (values.getValueCount() == ordinalMap.getValueCount()) {
// segment ordinals match global ordinals // segment ordinals match global ordinals
return values; return values;
} }
final RandomAccessOrds[] bytesValues = new RandomAccessOrds[atomicReaders.length]; final SortedSetDocValues[] bytesValues = new SortedSetDocValues[atomicReaders.length];
for (int i = 0; i < bytesValues.length; i++) { for (int i = 0; i < bytesValues.length; i++) {
bytesValues[i] = atomicReaders[i].afd.getOrdinalsValues(); bytesValues[i] = atomicReaders[i].afd.getOrdinalsValues();
} }

View file

@ -20,16 +20,17 @@
package org.elasticsearch.index.fielddata.ordinals; package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables; import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LongsRef; import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues; import org.apache.lucene.util.packed.PackedLongValues;
import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds; import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues;
import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
@ -100,21 +101,25 @@ public class MultiOrdinals extends Ordinals {
} }
@Override @Override
public RandomAccessOrds ordinals(ValuesHolder values) { public SortedSetDocValues ordinals(ValuesHolder values) {
if (multiValued) { if (multiValued) {
return new MultiDocs(this, values); return new MultiDocs(this, values);
} else { } else {
return (RandomAccessOrds) DocValues.singleton(new SingleDocs(this, values)); return (SortedSetDocValues) DocValues.singleton(new SingleDocs(this, values));
} }
} }
private static class SingleDocs extends SortedDocValues { private static class SingleDocs extends AbstractSortedDocValues {
private final int valueCount; private final int valueCount;
private final PackedLongValues endOffsets; private final PackedLongValues endOffsets;
private final PackedLongValues ords; private final PackedLongValues ords;
private final ValuesHolder values; private final ValuesHolder values;
private int currentDoc = -1;
private long currentStartOffset;
private long currentEndOffset;
SingleDocs(MultiOrdinals ordinals, ValuesHolder values) { SingleDocs(MultiOrdinals ordinals, ValuesHolder values) {
this.valueCount = (int) ordinals.valueCount; this.valueCount = (int) ordinals.valueCount;
this.endOffsets = ordinals.endOffsets; this.endOffsets = ordinals.endOffsets;
@ -123,10 +128,21 @@ public class MultiOrdinals extends Ordinals {
} }
@Override @Override
public int getOrd(int docId) { public int ordValue() {
final long startOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; return (int) ords.get(currentStartOffset);
final long endOffset = endOffsets.get(docId); }
return startOffset == endOffset ? -1 : (int) ords.get(startOffset);
@Override
public boolean advanceExact(int docId) throws IOException {
currentDoc = docId;
currentStartOffset = docId != 0 ? endOffsets.get(docId - 1) : 0;
currentEndOffset = endOffsets.get(docId);
return currentStartOffset != currentEndOffset;
}
@Override
public int docID() {
return currentDoc;
} }
@Override @Override
@ -141,15 +157,16 @@ public class MultiOrdinals extends Ordinals {
} }
private static class MultiDocs extends AbstractRandomAccessOrds { private static class MultiDocs extends AbstractSortedSetDocValues {
private final long valueCount; private final long valueCount;
private final PackedLongValues endOffsets; private final PackedLongValues endOffsets;
private final PackedLongValues ords; private final PackedLongValues ords;
private long offset;
private int cardinality;
private final ValuesHolder values; private final ValuesHolder values;
private long currentOffset;
private long currentEndOffset;
MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { MultiDocs(MultiOrdinals ordinals, ValuesHolder values) {
this.valueCount = ordinals.valueCount; this.valueCount = ordinals.valueCount;
this.endOffsets = ordinals.endOffsets; this.endOffsets = ordinals.endOffsets;
@ -163,21 +180,19 @@ public class MultiOrdinals extends Ordinals {
} }
@Override @Override
public void doSetDocument(int docId) { public boolean advanceExact(int docId) throws IOException {
final long startOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; currentOffset = docId != 0 ? endOffsets.get(docId - 1) : 0;
final long endOffset = endOffsets.get(docId); currentEndOffset = endOffsets.get(docId);
offset = startOffset; return currentOffset != currentEndOffset;
cardinality = (int) (endOffset - startOffset);
} }
@Override @Override
public int cardinality() { public long nextOrd() throws IOException {
return cardinality; if (currentOffset == currentEndOffset) {
} return SortedSetDocValues.NO_MORE_ORDS;
} else {
@Override return ords.get(currentOffset++);
public long ordAt(int index) { }
return ords.get(offset + index);
} }
@Override @Override

View file

@ -19,7 +19,7 @@
package org.elasticsearch.index.fielddata.ordinals; package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -41,9 +41,9 @@ public abstract class Ordinals implements Accountable {
@Override @Override
public abstract long ramBytesUsed(); public abstract long ramBytesUsed();
public abstract RandomAccessOrds ordinals(ValuesHolder values); public abstract SortedSetDocValues ordinals(ValuesHolder values);
public final RandomAccessOrds ordinals() { public final SortedSetDocValues ordinals() {
return ordinals(NO_VALUES); return ordinals(NO_VALUES);
} }

View file

@ -19,17 +19,14 @@
package org.elasticsearch.index.fielddata.ordinals; package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.FilteredTermsEnum;
import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.TermsEnum; import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.FixedBitSet; import org.apache.lucene.util.FixedBitSet;
import org.apache.lucene.util.LegacyNumericUtils;
import org.apache.lucene.util.LongsRef; import org.apache.lucene.util.LongsRef;
import org.apache.lucene.util.packed.GrowableWriter; import org.apache.lucene.util.packed.GrowableWriter;
import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedInts;
@ -414,23 +411,6 @@ public final class OrdinalsBuilder implements Closeable {
} }
} }
/**
* A {@link TermsEnum} that iterates only highest resolution geo prefix coded terms.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapGeoPointTerms(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// accept only the max resolution terms
// todo is this necessary?
return GeoPointField.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ?
AcceptStatus.YES : AcceptStatus.END;
}
};
}
/** /**
* Returns the maximum document ID this builder can associate with an ordinal * Returns the maximum document ID this builder can associate with an ordinal
@ -439,51 +419,12 @@ public final class OrdinalsBuilder implements Closeable {
return maxDoc; return maxDoc;
} }
/**
* A {@link TermsEnum} that iterates only full precision prefix coded 64 bit values.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapNumeric64Bit(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// we stop accepting terms once we moved across the prefix codec terms - redundant values!
return LegacyNumericUtils.getPrefixCodedLongShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
}
};
}
/**
* A {@link TermsEnum} that iterates only full precision prefix coded 32 bit values.
*
* @see #buildFromTerms(TermsEnum)
*/
public static TermsEnum wrapNumeric32Bit(TermsEnum termsEnum) {
return new FilteredTermsEnum(termsEnum, false) {
@Override
protected AcceptStatus accept(BytesRef term) throws IOException {
// we stop accepting terms once we moved across the prefix codec terms - redundant values!
return LegacyNumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END;
}
};
}
/** /**
* This method iterates all terms in the given {@link TermsEnum} and * This method iterates all terms in the given {@link TermsEnum} and
* associates each terms ordinal with the terms documents. The caller must * associates each terms ordinal with the terms documents. The caller must
* exhaust the returned {@link BytesRefIterator} which returns all values * exhaust the returned {@link BytesRefIterator} which returns all values
* where the first returned value is associated with the ordinal <tt>1</tt> * where the first returned value is associated with the ordinal <tt>1</tt>
* etc. * etc.
* <p>
* If the {@link TermsEnum} contains prefix coded numerical values the terms
* enum should be wrapped with either {@link #wrapNumeric32Bit(TermsEnum)}
* or {@link #wrapNumeric64Bit(TermsEnum)} depending on its precision. If
* the {@link TermsEnum} is not wrapped the returned
* {@link BytesRefIterator} will contain partial precision terms rather than
* only full-precision terms.
* </p>
*/ */
public BytesRefIterator buildFromTerms(final TermsEnum termsEnum) throws IOException { public BytesRefIterator buildFromTerms(final TermsEnum termsEnum) throws IOException {
return new BytesRefIterator() { return new BytesRefIterator() {

View file

@ -20,14 +20,15 @@
package org.elasticsearch.index.fielddata.ordinals; package org.elasticsearch.index.fielddata.ordinals;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables; import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedInts;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
@ -57,16 +58,19 @@ public class SinglePackedOrdinals extends Ordinals {
} }
@Override @Override
public RandomAccessOrds ordinals(ValuesHolder values) { public SortedSetDocValues ordinals(ValuesHolder values) {
return (RandomAccessOrds) DocValues.singleton(new Docs(this, values)); return (SortedSetDocValues) DocValues.singleton(new Docs(this, values));
} }
private static class Docs extends SortedDocValues { private static class Docs extends AbstractSortedDocValues {
private final int maxOrd; private final int maxOrd;
private final PackedInts.Reader reader; private final PackedInts.Reader reader;
private final ValuesHolder values; private final ValuesHolder values;
private int currentDoc = -1;
private int currentOrd;
Docs(SinglePackedOrdinals parent, ValuesHolder values) { Docs(SinglePackedOrdinals parent, ValuesHolder values) {
this.maxOrd = parent.valueCount; this.maxOrd = parent.valueCount;
this.reader = parent.reader; this.reader = parent.reader;
@ -84,8 +88,20 @@ public class SinglePackedOrdinals extends Ordinals {
} }
@Override @Override
public int getOrd(int docID) { public int ordValue() {
return (int) (reader.get(docID) - 1); return currentOrd;
}
@Override
public boolean advanceExact(int docID) throws IOException {
currentDoc = docID;
currentOrd = (int) (reader.get(docID) - 1);
return currentOrd != -1;
}
@Override
public int docID() {
return currentDoc;
} }
} }
} }

View file

@ -59,7 +59,7 @@ public abstract class AbstractAtomicGeoPointFieldData implements AtomicGeoPointF
@Override @Override
public MultiGeoPointValues getGeoPointValues() { public MultiGeoPointValues getGeoPointValues() {
return FieldData.emptyMultiGeoPoints(maxDoc); return FieldData.emptyMultiGeoPoints();
} }
}; };
} }

View file

@ -20,7 +20,7 @@
package org.elasticsearch.index.fielddata.plain; package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.FieldData;
@ -34,13 +34,13 @@ import java.util.function.Function;
public abstract class AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsFieldData { public abstract class AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsFieldData {
public static final Function<RandomAccessOrds, ScriptDocValues<?>> DEFAULT_SCRIPT_FUNCTION = public static final Function<SortedSetDocValues, ScriptDocValues<?>> DEFAULT_SCRIPT_FUNCTION =
((Function<RandomAccessOrds, SortedBinaryDocValues>) FieldData::toString) ((Function<SortedSetDocValues, SortedBinaryDocValues>) FieldData::toString)
.andThen(ScriptDocValues.Strings::new); .andThen(ScriptDocValues.Strings::new);
private final Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction; private final Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction;
protected AbstractAtomicOrdinalsFieldData(Function<RandomAccessOrds, ScriptDocValues<?>> scriptFunction) { protected AbstractAtomicOrdinalsFieldData(Function<SortedSetDocValues, ScriptDocValues<?>> scriptFunction) {
this.scriptFunction = scriptFunction; this.scriptFunction = scriptFunction;
} }
@ -72,7 +72,7 @@ public abstract class AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsF
} }
@Override @Override
public RandomAccessOrds getOrdinalsValues() { public SortedSetDocValues getOrdinalsValues() {
return DocValues.emptySortedSet(); return DocValues.emptySortedSet();
} }
}; };

View file

@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import java.io.IOException;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Set; import java.util.Set;
@ -46,16 +47,28 @@ abstract class AbstractAtomicParentChildFieldData implements AtomicParentChildFi
public final SortedBinaryDocValues getBytesValues() { public final SortedBinaryDocValues getBytesValues() {
return new SortedBinaryDocValues() { return new SortedBinaryDocValues() {
private final SortedDocValues[] perTypeValues;
private final BytesRef[] terms = new BytesRef[2]; private final BytesRef[] terms = new BytesRef[2];
private int count; private int count;
private int termsCursor;
{
Set<String> types = types();
perTypeValues = new SortedDocValues[types.size()];
int i = 0;
for (String type : types) {
perTypeValues[i++] = getOrdinalsValues(type);
}
}
@Override @Override
public void setDocument(int docId) { public boolean advanceExact(int docId) throws IOException {
count = 0; count = 0;
for (String type : types()) { termsCursor = 0;
final SortedDocValues values = getOrdinalsValues(type);
final int ord = values.getOrd(docId); for (SortedDocValues values : perTypeValues) {
if (ord >= 0) { if (values.advanceExact(docId)) {
final int ord = values.ordValue();
terms[count++] = values.lookupOrd(ord); terms[count++] = values.lookupOrd(ord);
} }
} }
@ -69,16 +82,17 @@ abstract class AbstractAtomicParentChildFieldData implements AtomicParentChildFi
count = 1; count = 1;
} }
} }
return count != 0;
} }
@Override @Override
public int count() { public int docValueCount() {
return count; return count;
} }
@Override @Override
public BytesRef valueAt(int index) { public BytesRef nextValue() throws IOException {
return terms[index]; return terms[termsCursor++];
} }
}; };
} }

View file

@ -1,83 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

/**
 * Base class for {@code geo_point} field data that is backed by Lucene doc values
 * rather than an in-memory structure built from the inverted index.
 */
public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {

    AbstractGeoPointDVIndexFieldData(Index index, String fieldName) {
        super(index, fieldName);
    }

    /**
     * Plain sorting on a geo_point field is not meaningful; callers must use a
     * dedicated geo sort (e.g. {@code geo_distance}) instead.
     *
     * @throws IllegalArgumentException always
     */
    @Override
    public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
        throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
    }

    /**
     * Field data implementation for geo_point fields whose values are stored as
     * sorted-numeric doc values (legacy encoded lat/lon).
     */
    public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData {
        public GeoPointDVIndexFieldData(Index index, String fieldName) {
            super(index, fieldName);
        }

        @Override
        public AtomicGeoPointFieldData load(LeafReaderContext context) {
            try {
                // Wrap the per-segment doc values; DocValues.getSortedNumeric returns an
                // empty instance (rather than null) when the field has no doc values.
                return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName));
            } catch (IOException e) {
                throw new IllegalStateException("Cannot load doc values", e);
            }
        }

        @Override
        public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
            // Doc values are already on disk, so "direct" loading is the same as lazy loading.
            return load(context);
        }
    }

    public static class Builder implements IndexFieldData.Builder {
        @Override
        public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
                                       CircuitBreakerService breakerService, MapperService mapperService) {
            // Ignore breaker: doc-values-backed field data lives on disk, so it does not
            // need to be accounted against the field data circuit breaker.
            return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name());
        }
    }
}

View file

@ -1,117 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.search.SortField;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

/**
 * Base class for geo_point field data loaded from the terms dictionary (pre-doc-values
 * indices). Provides term iterators that decode indexed geo terms back into points.
 */
abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<AtomicGeoPointFieldData> implements IndexGeoPointFieldData {

    /** Shared holder of the underlying raw term iterator for the decoding enums below. */
    protected abstract static class BaseGeoPointTermsEnum {
        protected final BytesRefIterator termsEnum;

        protected BaseGeoPointTermsEnum(BytesRefIterator termsEnum) {
            this.termsEnum = termsEnum;
        }
    }

    /**
     * Iterates geo terms encoded with {@link GeoPointField.TermEncoding} and decodes
     * each term to its geo-coded {@code long} representation.
     */
    protected static class GeoPointTermsEnum extends BaseGeoPointTermsEnum {
        private final GeoPointField.TermEncoding termEncoding;

        protected GeoPointTermsEnum(BytesRefIterator termsEnum, GeoPointField.TermEncoding termEncoding) {
            super(termsEnum);
            this.termEncoding = termEncoding;
        }

        /**
         * @return the next decoded geo-coded long, or {@code null} when the underlying
         *         iterator is exhausted
         */
        public Long next() throws IOException {
            final BytesRef term = termsEnum.next();
            if (term == null) {
                return null;
            }
            // PREFIX and NUMERIC are the two encodings Lucene's GeoPointField can emit;
            // anything else indicates a programming error, hence the exception below.
            if (termEncoding == GeoPointField.TermEncoding.PREFIX) {
                return GeoPointField.prefixCodedToGeoCoded(term);
            } else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) {
                return LegacyNumericUtils.prefixCodedToLong(term);
            }
            throw new IllegalArgumentException("GeoPoint.TermEncoding should be one of: " + GeoPointField.TermEncoding.PREFIX
                + " or " + GeoPointField.TermEncoding.NUMERIC + " found: " + termEncoding);
        }
    }

    /**
     * Iterates legacy geo terms stored as "lat,lon" strings (pre-2.0 indices) and
     * parses each one into a {@link GeoPoint}. The returned point is a reused
     * mutable instance, so callers must copy it if they need to keep it.
     */
    protected static class GeoPointTermsEnumLegacy extends BaseGeoPointTermsEnum {
        private final GeoPoint next;          // reused across calls to avoid per-term allocation
        private final CharsRefBuilder spare;  // reused UTF-8 -> chars conversion buffer

        protected GeoPointTermsEnumLegacy(BytesRefIterator termsEnum) {
            super(termsEnum);
            next = new GeoPoint();
            spare = new CharsRefBuilder();
        }

        /**
         * @return the next parsed point (shared mutable instance), or {@code null}
         *         when the underlying iterator is exhausted
         */
        public GeoPoint next() throws IOException {
            final BytesRef term = termsEnum.next();
            if (term == null) {
                return null;
            }
            spare.copyUTF8Bytes(term);
            int commaIndex = -1;
            for (int i = 0; i < spare.length(); i++) {
                if (spare.charAt(i) == ',') { // saves a string creation
                    commaIndex = i;
                    break;
                }
            }
            if (commaIndex == -1) {
                // A legacy geo term should always contain a comma; fall back to (0,0)
                // rather than failing hard, but trip the assertion when tests run.
                assert false;
                return next.reset(0, 0);
            }
            final double lat = Double.parseDouble(new String(spare.chars(), 0, commaIndex));
            final double lon = Double.parseDouble(new String(spare.chars(), commaIndex + 1, spare.length() - (commaIndex + 1)));
            return next.reset(lat, lon);
        }
    }

    AbstractIndexGeoPointFieldData(IndexSettings indexSettings, String fieldName, IndexFieldDataCache cache) {
        super(indexSettings, fieldName, cache);
    }

    /**
     * Plain sorting on a geo_point field is not meaningful; callers must use a
     * dedicated geo sort (e.g. {@code geo_distance}) instead.
     *
     * @throws IllegalArgumentException always
     */
    @Override
    public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) {
        throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
    }

    @Override
    protected AtomicGeoPointFieldData empty(int maxDoc) {
        return AbstractAtomicGeoPointFieldData.empty(maxDoc);
    }
}

View file

@ -19,13 +19,11 @@
package org.elasticsearch.index.fielddata.plain; package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField; import org.apache.lucene.search.SortField;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index; import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
@ -38,8 +36,6 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData
implements IndexGeoPointFieldData { implements IndexGeoPointFieldData {
AbstractLatLonPointDVIndexFieldData(Index index, String fieldName) { AbstractLatLonPointDVIndexFieldData(Index index, String fieldName) {
@ -58,16 +54,12 @@ public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndex
@Override @Override
public AtomicGeoPointFieldData load(LeafReaderContext context) { public AtomicGeoPointFieldData load(LeafReaderContext context) {
try { LeafReader reader = context.reader();
LeafReader reader = context.reader(); FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName);
FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName); if (info != null) {
if (info != null) { checkCompatible(info);
checkCompatible(info);
}
return new LatLonPointDVAtomicFieldData(DocValues.getSortedNumeric(reader, fieldName));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
} }
return new LatLonPointDVAtomicFieldData(reader, fieldName);
} }
@Override @Override

View file

@ -67,7 +67,7 @@ abstract class AtomicDoubleFieldData implements AtomicNumericFieldData {
@Override @Override
public SortedNumericDoubleValues getDoubleValues() { public SortedNumericDoubleValues getDoubleValues() {
return FieldData.emptySortedNumericDoubles(maxDoc); return FieldData.emptySortedNumericDoubles();
} }
@Override @Override

View file

@ -23,7 +23,6 @@ import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReader;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Bits;
import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.AtomicFieldData;
import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.ScriptDocValues;
@ -49,8 +48,7 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData {
public SortedBinaryDocValues getBytesValues() { public SortedBinaryDocValues getBytesValues() {
try { try {
final BinaryDocValues values = DocValues.getBinary(reader, field); final BinaryDocValues values = DocValues.getBinary(reader, field);
final Bits docsWithField = DocValues.getDocsWithField(reader, field); return FieldData.singleton(values);
return FieldData.singleton(values, docsWithField);
} catch (IOException e) { } catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e); throw new IllegalStateException("Cannot load doc values", e);
} }

Some files were not shown because too many files have changed in this diff Show more